diff --git a/.codegen.json b/.codegen.json
index cd1fa12ed..da4f3dd61 100644
--- a/.codegen.json
+++ b/.codegen.json
@@ -6,5 +6,8 @@
   "batch": {
     ".codegen/cmds-workspace.go.tmpl": "cmd/workspace/cmd.go",
     ".codegen/cmds-account.go.tmpl": "cmd/account/cmd.go"
+  },
+  "toolchain": {
+    "required": ["go"]
   }
 }
diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
new file mode 100644
index 000000000..b59218d39
--- /dev/null
+++ b/.codegen/_openapi_sha
@@ -0,0 +1 @@
+09a7fa63d9ae243e5407941f200960ca14d48b07
\ No newline at end of file
diff --git a/.codegen/changelog.md.tmpl b/.codegen/changelog.md.tmpl
new file mode 100644
index 000000000..018fb1cb7
--- /dev/null
+++ b/.codegen/changelog.md.tmpl
@@ -0,0 +1,40 @@
+# Version changelog
+
+## {{.Version}}
+
+CLI:
+{{- range .Changes}}
+ * {{.}}.
+{{- end}}
+
+Bundles:
+ * **FILL THIS IN MANUALLY BY MOVING RELEVANT ITEMS FROM ABOVE LIST**
+
+Internal:
+ * **FILL THIS IN MANUALLY BY MOVING RELEVANT ITEMS FROM ABOVE LIST**
+
+{{ if .ApiChanges -}}
+API Changes:
+{{- range .ApiChanges}}{{if or (eq .X "method") (eq .X "service")}}
+ * {{.Action}} {{template "what" .}}{{if .Extra}} {{.Extra}}{{with .Other}} {{template "what" .}}{{end}}{{end}}.
+{{- end}}{{- end}}
+
+OpenAPI commit {{.Sha}} ({{.Changed}})
+{{- end }}
+
+{{- if .DependencyUpdates }}
+Dependency updates:
+{{- range .DependencyUpdates}}
+ * {{.}}.
+{{- end -}}
+{{end}}
+
+## {{.PrevVersion}}
+
+{{- define "what" -}}
+  {{if eq .X "service" -}}
+  `databricks {{if .Service.IsAccounts}}account {{end -}}{{(.Service.TrimPrefix "account").KebabName}}` command group
+  {{- else if eq .X "method" -}}
+  `databricks {{if .Method.Service.IsAccounts}}account {{end -}}{{(.Method.Service.TrimPrefix "account").KebabName}} {{.Method.KebabName}}` command
+  {{- end}}
+{{- end -}}
diff --git a/.codegen/cmds-account.go.tmpl b/.codegen/cmds-account.go.tmpl
index d31959248..f3da7e2c8 100644
--- a/.codegen/cmds-account.go.tmpl
+++ b/.codegen/cmds-account.go.tmpl
@@ -11,20 +11,21 @@ import (
     {{.SnakeName}} "github.com/databricks/cli/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}}
 )
 
-var accountCmd = &cobra.Command{
-    Use: "account",
-    Short: `Databricks Account Commands`,
-}
-
-func init() {
-    root.RootCmd.AddCommand(accountCmd)
+func New() *cobra.Command {
+    cmd := &cobra.Command{
+        Use: "account",
+        Short: `Databricks Account Commands`,
+    }
 
     {{range .Services}}{{if .IsAccounts}}{{if not (in $excludes .KebabName) -}}
-    accountCmd.AddCommand({{.SnakeName}}.Cmd)
+    cmd.AddCommand({{.SnakeName}}.New())
     {{end}}{{end}}{{end}}
 
-    // Register commands with groups
-    {{range .Services}}{{if .IsAccounts}}{{if not (in $excludes .KebabName) -}}
-    {{.SnakeName}}.Cmd.GroupID = "{{ .Package.Name }}"
-    {{end}}{{end}}{{end}}
+    // Register all groups with the parent command.
+ groups := Groups() + for i := range groups { + cmd.AddGroup(&groups[i]) + } + + return cmd } diff --git a/.codegen/cmds-workspace.go.tmpl b/.codegen/cmds-workspace.go.tmpl index d3da36554..a9daa05d8 100644 --- a/.codegen/cmds-workspace.go.tmpl +++ b/.codegen/cmds-workspace.go.tmpl @@ -2,7 +2,15 @@ package workspace -{{ $excludes := list "command-execution" "statement-execution" "dbfs" "dbsql-permissions" "account-access-control-proxy" }} +{{ $excludes := + list + "command-execution" + "statement-execution" + "dbfs" + "dbsql-permissions" + "account-access-control-proxy" + "files" +}} import ( "github.com/databricks/cli/cmd/root" @@ -10,13 +18,12 @@ import ( {{.SnakeName}} "github.com/databricks/cli/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}} ) -func init() { +func All() []*cobra.Command { + var out []*cobra.Command + {{range .Services}}{{if not .IsAccounts}}{{if not (in $excludes .KebabName) -}} - root.RootCmd.AddCommand({{.SnakeName}}.Cmd) + out = append(out, {{.SnakeName}}.New()) {{end}}{{end}}{{end}} - // Register commands with groups - {{range .Services}}{{if not .IsAccounts}}{{if not (in $excludes .KebabName) -}} - {{.SnakeName}}.Cmd.GroupID = "{{ .Package.Name }}" - {{end}}{{end}}{{end}} + return out } diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl index 76f4a94ee..4ede142d2 100644 --- a/.codegen/service.go.tmpl +++ b/.codegen/service.go.tmpl @@ -10,7 +10,15 @@ import ( "github.com/spf13/cobra" ) -{{ $excludes := list "command-execution" "statement-execution" "dbfs" "dbsql-permissions" "account-access-control-proxy" }} +{{ $excludes := + list + "command-execution" + "statement-execution" + "dbfs" + "dbsql-permissions" + "account-access-control-proxy" + "files" +}} {{if not (in $excludes .KebabName) }} {{template "service" .}} @@ -19,20 +27,34 @@ import ( {{end}} {{define "service"}} -var Cmd = &cobra.Command{ - Use: "{{(.TrimPrefix "account").KebabName}}", - {{- if .Description }} - Short: `{{.Summary | without "`"}}`, - Long: `{{.Comment " " 80 | without "`"}}`, - {{- end }} - Annotations: map[string]string{ - "package": "{{ .Package.Name }}", - }, - {{- if .IsPrivatePreview }} +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) - // This service is being previewed; hide from help output. - Hidden: true, - {{- end }} +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "{{(.TrimPrefix "account").KebabName}}", + {{- if .Description }} + Short: `{{.Summary | without "`"}}`, + Long: `{{.Comment " " 80 | without "`"}}`, + {{- end }} + GroupID: "{{ .Package.Name }}", + Annotations: map[string]string{ + "package": "{{ .Package.Name }}", + }, + {{- if .IsPrivatePreview }} + + // This service is being previewed; hide from help output. + Hidden: true, + {{- end }} + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } {{- $serviceName := .KebabName -}} @@ -44,26 +66,39 @@ var Cmd = &cobra.Command{ {{end}} // start {{.KebabName}} command -{{- $useJsonForAllFields := or .IsJsonOnly (and .Request (or (not .Request.IsAllRequiredFieldsPrimitive) .Request.HasRequiredNonBodyField)) -}} -{{- $needJsonFlag := or $useJsonForAllFields (and .Request (not .Request.IsOnlyPrimitiveFields)) -}} -{{- if .Request}} -var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}} -{{- if $needJsonFlag}} -var {{.CamelName}}Json flags.JsonFlag -{{- end}} -{{end}} -{{if .Wait}}var {{.CamelName}}SkipWait bool -var {{.CamelName}}Timeout time.Duration{{end}} +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var {{.CamelName}}Overrides []func( + *cobra.Command, + {{- if .Request }} + *{{.Service.Package.Name}}.{{.Request.PascalName}}, + {{- end }} +) + +func new{{.PascalName}}() *cobra.Command { + cmd := &cobra.Command{} + + {{- $useJsonForAllFields := or .IsJsonOnly (and .Request (or (not .Request.IsAllRequiredFieldsPrimitive) .Request.HasRequiredNonBodyField)) -}} + {{- $needJsonFlag := or $useJsonForAllFields (and .Request (not .Request.IsOnlyPrimitiveFields)) -}} + + {{- if .Request}} + + var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}} + {{- if $needJsonFlag}} + var {{.CamelName}}Json flags.JsonFlag + {{- end}} + {{- end}} + + {{if .Wait}}var {{.CamelName}}SkipWait bool + var {{.CamelName}}Timeout time.Duration{{end}} -func init() { - Cmd.AddCommand({{.CamelName}}Cmd) {{if .Wait}} - {{.CamelName}}Cmd.Flags().BoolVar(&{{.CamelName}}SkipWait, "no-wait", {{.CamelName}}SkipWait, `do not wait to reach {{range $i, $e := .Wait.Success}}{{if $i}} or {{end}}{{.Content}}{{end}} state`) - {{.CamelName}}Cmd.Flags().DurationVar(&{{.CamelName}}Timeout, "timeout", {{.Wait.Timeout}}*time.Minute, `maximum amount of time to reach {{range $i, $e := .Wait.Success}}{{if $i}} or {{end}}{{.Content}}{{end}} state`) + cmd.Flags().BoolVar(&{{.CamelName}}SkipWait, "no-wait", {{.CamelName}}SkipWait, `do not wait to reach {{range $i, $e := .Wait.Success}}{{if $i}} or {{end}}{{.Content}}{{end}} state`) + cmd.Flags().DurationVar(&{{.CamelName}}Timeout, "timeout", {{.Wait.Timeout}}*time.Minute, `maximum amount of time to reach {{range $i, $e := .Wait.Success}}{{if $i}} or {{end}}{{.Content}}{{end}} state`) {{end -}} {{if .Request}}// TODO: short flags {{- if $needJsonFlag}} - {{.CamelName}}Cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`) {{- end}} {{$method := .}} {{ if not .IsJsonOnly }} @@ -74,38 +109,39 @@ func init() { {{else if .Entity.ArrayValue }}// TODO: array: {{.Name}} {{else if .Entity.MapValue }}// TODO: map via StringToStringVar: {{.Name}} {{else if .Entity.IsEmpty }}// TODO: output-only field - {{else if .Entity.Enum }}{{$method.CamelName}}Cmd.Flags().Var(&{{$method.CamelName}}Req.{{.PascalName}}, "{{.KebabName}}", `{{.Summary | without "`"}}`) - {{else}}{{$method.CamelName}}Cmd.Flags().{{template "arg-type" .Entity}}(&{{$method.CamelName}}Req.{{.PascalName}}, "{{.KebabName}}", {{$method.CamelName}}Req.{{.PascalName}}, `{{.Summary | without "`"}}`) + {{else if .Entity.Enum }}cmd.Flags().Var(&{{$method.CamelName}}Req.{{.PascalName}}, "{{.KebabName}}", 
`{{.Summary | without "`"}}`) + {{else}}cmd.Flags().{{template "arg-type" .Entity}}(&{{$method.CamelName}}Req.{{.PascalName}}, "{{.KebabName}}", {{$method.CamelName}}Req.{{.PascalName}}, `{{.Summary | without "`"}}`) {{end}} {{- end -}} {{- end}} {{- end}} {{end}} -} -{{- $excludeFromPrompts := list "workspace get-status" -}} -{{- $fullCommandName := (print $serviceName " " .KebabName) -}} -{{- $noPrompt := or .IsCrudCreate (in $excludeFromPrompts $fullCommandName) }} -{{ $hasPosArgs := and .Request (or .Request.IsAllRequiredFieldsPrimitive (eq .PascalName "RunNow")) -}} -{{- $hasSinglePosArg := and $hasPosArgs (eq 1 (len .Request.RequiredFields)) -}} -{{- $serviceHasNamedIdMap := and (and .Service.List .Service.List.NamedIdMap) (not (eq .PascalName "List")) -}} -{{- $hasIdPrompt := and (not $noPrompt) (and $hasSinglePosArg $serviceHasNamedIdMap) -}} -{{- $wait := and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}} -{{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}} -var {{.CamelName}}Cmd = &cobra.Command{ - Use: "{{.KebabName}}{{if $hasPosArgs}}{{range .Request.RequiredFields}} {{.ConstantName}}{{end}}{{end}}", + {{- $excludeFromPrompts := list "workspace get-status" -}} + {{- $fullCommandName := (print $serviceName " " .KebabName) -}} + {{- $noPrompt := or .IsCrudCreate (in $excludeFromPrompts $fullCommandName) }} + + {{- $hasPosArgs := and .Request (or .Request.IsAllRequiredFieldsPrimitive (eq .PascalName "RunNow")) -}} + {{- $hasSinglePosArg := and $hasPosArgs (eq 1 (len .Request.RequiredFields)) -}} + {{- $serviceHasNamedIdMap := and (and .Service.List .Service.List.NamedIdMap) (not (eq .PascalName "List")) -}} + {{- $hasIdPrompt := and (not $noPrompt) (and $hasSinglePosArg $serviceHasNamedIdMap) -}} + {{- $wait := and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}} + {{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}} + + cmd.Use = "{{.KebabName}}{{if $hasPosArgs}}{{range .Request.RequiredFields}} {{.ConstantName}}{{end}}{{end}}" {{- if .Description }} - Short: `{{.Summary | without "`"}}`, - Long: `{{.Comment " " 80 | without "`"}}`, + cmd.Short = `{{.Summary | without "`"}}` + cmd.Long = `{{.Comment " " 80 | without "`"}}` {{- end }} {{- if .IsPrivatePreview }} // This command is being previewed; hide from help output. - Hidden: true, + cmd.Hidden = true {{- end }} - Annotations: map[string]string{},{{if $hasRequiredArgs }} - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + {{if $hasRequiredArgs }} + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs({{len .Request.RequiredFields}}) {{- if $useJsonForAllFields }} if cmd.Flags().Changed("json") { @@ -113,9 +149,10 @@ var {{.CamelName}}Cmd = &cobra.Command{ } {{- end }} return check(cmd, args) - },{{end}} - PreRunE: root.Must{{if .Service.IsAccounts}}Account{{else}}Workspace{{end}}Client, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + {{end}} + cmd.PreRunE = root.Must{{if .Service.IsAccounts}}Account{{else}}Workspace{{end}}Client + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() {{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}} {{- if .Request }} @@ -204,10 +241,24 @@ var {{.CamelName}}Cmd = &cobra.Command{ {{- else -}} {{template "method-call" .}} {{end -}} - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range {{.CamelName}}Overrides { + fn(cmd{{if .Request}}, &{{.CamelName}}Req{{end}}) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(new{{.PascalName}}()) + }) } {{end}} // end service {{.Name}}{{end}} diff --git a/.gitattributes b/.gitattributes index 8b95da207..3209a0f3f 100755 --- a/.gitattributes +++ b/.gitattributes @@ -10,6 +10,7 @@ cmd/account/ip-access-lists/ip-access-lists.go linguist-generated=true cmd/account/log-delivery/log-delivery.go linguist-generated=true cmd/account/metastore-assignments/metastore-assignments.go linguist-generated=true cmd/account/metastores/metastores.go linguist-generated=true +cmd/account/network-policy/network-policy.go linguist-generated=true cmd/account/networks/networks.go linguist-generated=true cmd/account/o-auth-enrollment/o-auth-enrollment.go linguist-generated=true cmd/account/private-access/private-access.go linguist-generated=true @@ -24,12 +25,15 @@ cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true cmd/account/workspaces/workspaces.go linguist-generated=true cmd/workspace/alerts/alerts.go linguist-generated=true +cmd/workspace/artifact-allowlists/artifact-allowlists.go linguist-generated=true cmd/workspace/catalogs/catalogs.go linguist-generated=true +cmd/workspace/clean-rooms/clean-rooms.go linguist-generated=true cmd/workspace/cluster-policies/cluster-policies.go linguist-generated=true cmd/workspace/clusters/clusters.go linguist-generated=true cmd/workspace/cmd.go linguist-generated=true cmd/workspace/connections/connections.go linguist-generated=true cmd/workspace/current-user/current-user.go linguist-generated=true +cmd/workspace/dashboard-widgets/dashboard-widgets.go linguist-generated=true cmd/workspace/dashboards/dashboards.go linguist-generated=true cmd/workspace/data-sources/data-sources.go linguist-generated=true cmd/workspace/experiments/experiments.go linguist-generated=true @@ -46,14 +50,17 @@ cmd/workspace/jobs/jobs.go linguist-generated=true cmd/workspace/libraries/libraries.go linguist-generated=true cmd/workspace/metastores/metastores.go linguist-generated=true cmd/workspace/model-registry/model-registry.go linguist-generated=true +cmd/workspace/model-versions/model-versions.go linguist-generated=true cmd/workspace/permissions/permissions.go linguist-generated=true cmd/workspace/pipelines/pipelines.go linguist-generated=true cmd/workspace/policy-families/policy-families.go linguist-generated=true cmd/workspace/providers/providers.go linguist-generated=true cmd/workspace/queries/queries.go linguist-generated=true cmd/workspace/query-history/query-history.go linguist-generated=true +cmd/workspace/query-visualizations/query-visualizations.go linguist-generated=true cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true cmd/workspace/recipients/recipients.go linguist-generated=true +cmd/workspace/registered-models/registered-models.go linguist-generated=true cmd/workspace/repos/repos.go linguist-generated=true cmd/workspace/schemas/schemas.go linguist-generated=true cmd/workspace/secrets/secrets.go linguist-generated=true diff --git a/.github/workflows/publish-latest.yml b/.github/workflows/publish-latest.yml deleted file mode 100644 index 921edfd39..000000000 --- 
a/.github/workflows/publish-latest.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: publish-latest - -on: - workflow_dispatch: - - workflow_call: - -jobs: - publish: - runs-on: ubuntu-22.04 - - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - ref: release-s3 - - - name: Install s3cmd - run: | - sudo apt-get update - sudo apt-get install s3cmd - - - name: Publish to S3 - working-directory: ./scripts - run: ./publish_to_s3.sh - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 7e1bc2437..f0fa2ee68 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -3,8 +3,16 @@ name: build on: pull_request: types: [opened, synchronize] + merge_group: + types: [checks_requested] push: - branches: [main] + # Always run on push to main. The build cache can only be reused + # if it was saved by a run from the repository's default branch. + # The run result will be identical to that from the merge queue + # because the commit is identical, yet we need to perform it to + # seed the build cache. + branches: + - main jobs: tests: @@ -20,16 +28,15 @@ jobs: steps: - name: Checkout repository and submodules - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Unshallow run: git fetch --prune --unshallow - name: Setup Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: - go-version: 1.19.5 - cache: true + go-version: 1.21.0 - name: Set go env run: | @@ -39,7 +46,9 @@ jobs: go install honnef.co/go/tools/cmd/staticcheck@latest - name: Pull external libraries - run: make vendor + run: | + make vendor + pip3 install wheel - name: Run tests run: make test @@ -49,14 +58,12 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: - # Use 1.19 because of godoc formatting. - # See https://tip.golang.org/doc/go1.19#go-doc. - go-version: 1.19 + go-version: 1.21.0 # No need to download cached dependencies when running gofmt. cache: false diff --git a/.github/workflows/release-snapshot.yml b/.github/workflows/release-snapshot.yml index 3acb6b5a5..c3398a2bd 100644 --- a/.github/workflows/release-snapshot.yml +++ b/.github/workflows/release-snapshot.yml @@ -13,32 +13,15 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository and submodules - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Unshallow run: git fetch --prune --unshallow - name: Setup Go - id: go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: - go-version: 1.19.5 - - - name: Locate cache paths - id: cache - run: | - echo "GOMODCACHE=$(go env GOMODCACHE)" >> $GITHUB_OUTPUT - echo "GOCACHE=$(go env GOCACHE)" >> $GITHUB_OUTPUT - - # Note: use custom caching because below performs a cross platform build - # through goreleaser and don't want to share a cache with the test builds. 
- - name: Setup caching - uses: actions/cache@v3 - with: - path: | - ${{ steps.cache.outputs.GOMODCACHE }} - ${{ steps.cache.outputs.GOCACHE }} - key: release-${{ runner.os }}-go-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', '.goreleaser.yaml') }} + go-version: 1.21.0 - name: Hide snapshot tag to outsmart GoReleaser run: git tag -d snapshot || true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cb4847cae..cbab15ecc 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,32 +12,15 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository and submodules - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Unshallow run: git fetch --prune --unshallow - name: Setup Go - id: go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: - go-version: 1.19.5 - - - name: Locate cache paths - id: cache - run: | - echo "GOMODCACHE=$(go env GOMODCACHE)" >> $GITHUB_OUTPUT - echo "GOCACHE=$(go env GOCACHE)" >> $GITHUB_OUTPUT - - # Note: use custom caching because below performs a cross platform build - # through goreleaser and don't want to share a cache with the test builds. - - name: Setup caching - uses: actions/cache@v3 - with: - path: | - ${{ steps.cache.outputs.GOMODCACHE }} - ${{ steps.cache.outputs.GOCACHE }} - key: release-${{ runner.os }}-go-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', '.goreleaser.yaml') }} + go-version: 1.21.0 - name: Run GoReleaser uses: goreleaser/goreleaser-action@v4 @@ -46,8 +29,3 @@ jobs: args: release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - publish: - uses: ./.github/workflows/publish-latest.yml - needs: goreleaser - secrets: inherit diff --git a/.gitignore b/.gitignore index 5f00a82b3..edd1409ae 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,6 @@ __pycache__ .terraform.lock.hcl .vscode/launch.json +.vscode/tasks.json + +.databricks diff --git a/.vscode/__builtins__.pyi b/.vscode/__builtins__.pyi new file mode 100644 index 000000000..81f9a49ec --- /dev/null +++ b/.vscode/__builtins__.pyi @@ -0,0 +1,3 @@ +# Typings for Pylance in VS Code +# see https://github.com/microsoft/pyright/blob/main/docs/builtins.md +from databricks.sdk.runtime import * diff --git a/.vscode/settings.json b/.vscode/settings.json index 76be94afa..869465286 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -7,5 +7,8 @@ "files.insertFinalNewline": true, "files.trimFinalNewlines": true, "python.envFile": "${workspaceFolder}/.databricks/.databricks.env", - "databricks.python.envFile": "${workspaceFolder}/.env" + "databricks.python.envFile": "${workspaceFolder}/.env", + "python.analysis.stubPath": ".vscode", + "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])", + "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a7ed72bd..867e086be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,294 @@ # Version changelog +## 0.205.0 + +This release marks the public preview phase of Databricks Asset Bundles. + +For more information, please refer to our online documentation at +https://docs.databricks.com/en/dev-tools/bundles/. + +CLI: + * Prompt once for a client profile ([#727](https://github.com/databricks/cli/pull/727)). + +Bundles: + * Use clearer error message when no interpolation value is found. ([#764](https://github.com/databricks/cli/pull/764)). 
+ * Use interactive prompt to select resource to run if not specified ([#762](https://github.com/databricks/cli/pull/762)). + * Add documentation link bundle command group description ([#770](https://github.com/databricks/cli/pull/770)). + + +## 0.204.1 + +Bundles: + * Fix conversion of job parameters ([#744](https://github.com/databricks/cli/pull/744)). + * Add schema and config validation to jsonschema package ([#740](https://github.com/databricks/cli/pull/740)). + * Support Model Serving Endpoints in bundles ([#682](https://github.com/databricks/cli/pull/682)). + * Do not include empty output in job run output ([#749](https://github.com/databricks/cli/pull/749)). + * Fixed marking libraries from DBFS as remote ([#750](https://github.com/databricks/cli/pull/750)). + * Process only Python wheel tasks which have local libraries used ([#751](https://github.com/databricks/cli/pull/751)). + * Add enum support for bundle templates ([#668](https://github.com/databricks/cli/pull/668)). + * Apply Python wheel trampoline if workspace library is used ([#755](https://github.com/databricks/cli/pull/755)). + * List available targets when incorrect target passed ([#756](https://github.com/databricks/cli/pull/756)). + * Make bundle and sync fields optional ([#757](https://github.com/databricks/cli/pull/757)). + * Consolidate environment variable interaction ([#747](https://github.com/databricks/cli/pull/747)). + +Internal: + * Update Go SDK to v0.19.1 ([#759](https://github.com/databricks/cli/pull/759)). + + + +## 0.204.0 + +This release includes permission related commands for a subset of workspace +services where they apply. These complement the `permissions` command and +do not require specification of the object type to work with, as that is +implied by the command they are nested under. + +CLI: + * Group permission related commands ([#730](https://github.com/databricks/cli/pull/730)). + +Bundles: + * Fixed artifact file uploading on Windows and wheel execution on DBR 13.3 ([#722](https://github.com/databricks/cli/pull/722)). + * Make resource and artifact paths in bundle config relative to config folder ([#708](https://github.com/databricks/cli/pull/708)). + * Add support for ordering of input prompts ([#662](https://github.com/databricks/cli/pull/662)). + * Fix IsServicePrincipal() only working for workspace admins ([#732](https://github.com/databricks/cli/pull/732)). + * databricks bundle init template v1 ([#686](https://github.com/databricks/cli/pull/686)). + * databricks bundle init template v2: optional stubs, DLT support ([#700](https://github.com/databricks/cli/pull/700)). + * Show 'databricks bundle init' template in CLI prompt ([#725](https://github.com/databricks/cli/pull/725)). + * Include in set of environment variables to pass along. ([#736](https://github.com/databricks/cli/pull/736)). + +Internal: + * Update Go SDK to v0.19.0 ([#729](https://github.com/databricks/cli/pull/729)). + * Replace API call to test configuration with dummy authenticate call ([#728](https://github.com/databricks/cli/pull/728)). + +API Changes: + * Changed `databricks account storage-credentials create` command to return . + * Changed `databricks account storage-credentials get` command to return . + * Changed `databricks account storage-credentials list` command to return . + * Changed `databricks account storage-credentials update` command to return . + * Changed `databricks connections create` command with new required argument order. 
 * Changed `databricks connections update` command with new required argument order.
 * Changed `databricks volumes create` command with new required argument order.
 * Added `databricks artifact-allowlists` command group.
 * Added `databricks model-versions` command group.
 * Added `databricks registered-models` command group.
 * Added `databricks cluster-policies get-permission-levels` command.
 * Added `databricks cluster-policies get-permissions` command.
 * Added `databricks cluster-policies set-permissions` command.
 * Added `databricks cluster-policies update-permissions` command.
 * Added `databricks clusters get-permission-levels` command.
 * Added `databricks clusters get-permissions` command.
 * Added `databricks clusters set-permissions` command.
 * Added `databricks clusters update-permissions` command.
 * Added `databricks instance-pools get-permission-levels` command.
 * Added `databricks instance-pools get-permissions` command.
 * Added `databricks instance-pools set-permissions` command.
 * Added `databricks instance-pools update-permissions` command.
 * Added `databricks files` command group.
 * Changed `databricks permissions set` command to start returning .
 * Changed `databricks permissions update` command to start returning .
 * Added `databricks users get-permission-levels` command.
 * Added `databricks users get-permissions` command.
 * Added `databricks users set-permissions` command.
 * Added `databricks users update-permissions` command.
 * Added `databricks jobs get-permission-levels` command.
 * Added `databricks jobs get-permissions` command.
 * Added `databricks jobs set-permissions` command.
 * Added `databricks jobs update-permissions` command.
 * Changed `databricks experiments get-by-name` command to return .
 * Changed `databricks experiments get-experiment` command to return .
 * Added `databricks experiments delete-runs` command.
 * Added `databricks experiments get-permission-levels` command.
 * Added `databricks experiments get-permissions` command.
 * Added `databricks experiments restore-runs` command.
 * Added `databricks experiments set-permissions` command.
 * Added `databricks experiments update-permissions` command.
 * Added `databricks model-registry get-permission-levels` command.
 * Added `databricks model-registry get-permissions` command.
 * Added `databricks model-registry set-permissions` command.
 * Added `databricks model-registry update-permissions` command.
 * Added `databricks pipelines get-permission-levels` command.
 * Added `databricks pipelines get-permissions` command.
 * Added `databricks pipelines set-permissions` command.
 * Added `databricks pipelines update-permissions` command.
 * Added `databricks serving-endpoints get-permission-levels` command.
 * Added `databricks serving-endpoints get-permissions` command.
 * Added `databricks serving-endpoints set-permissions` command.
 * Added `databricks serving-endpoints update-permissions` command.
 * Added `databricks token-management get-permission-levels` command.
 * Added `databricks token-management get-permissions` command.
 * Added `databricks token-management set-permissions` command.
 * Added `databricks token-management update-permissions` command.
 * Changed `databricks dashboards create` command with new required argument order.
 * Added `databricks warehouses get-permission-levels` command.
 * Added `databricks warehouses get-permissions` command.
 * Added `databricks warehouses set-permissions` command.
 * Added `databricks warehouses update-permissions` command.
 * Added `databricks dashboard-widgets` command group.
 * Added `databricks query-visualizations` command group.
 * Added `databricks repos get-permission-levels` command.
 * Added `databricks repos get-permissions` command.
 * Added `databricks repos set-permissions` command.
 * Added `databricks repos update-permissions` command.
 * Added `databricks secrets get-secret` command.
 * Added `databricks workspace get-permission-levels` command.
 * Added `databricks workspace get-permissions` command.
 * Added `databricks workspace set-permissions` command.
 * Added `databricks workspace update-permissions` command.

OpenAPI commit 09a7fa63d9ae243e5407941f200960ca14d48b07 (2023-09-04)

## 0.203.3

Bundles:
 * Support cluster overrides with cluster_key and compute_key ([#696](https://github.com/databricks/cli/pull/696)).
 * Allow referencing local Python wheels without artifacts section defined ([#703](https://github.com/databricks/cli/pull/703)).
 * Fixed --environment flag ([#705](https://github.com/databricks/cli/pull/705)).
 * Correctly identify local paths in libraries section ([#702](https://github.com/databricks/cli/pull/702)).
 * Fixed path joining in FindFilesWithSuffixInPath ([#704](https://github.com/databricks/cli/pull/704)).
 * Added transformation mutator for Python wheel task for them to work on DBR <13.1 ([#635](https://github.com/databricks/cli/pull/635)).

Internal:
 * Add a foundation for built-in templates ([#685](https://github.com/databricks/cli/pull/685)).
 * Test transform when no Python wheel tasks defined ([#714](https://github.com/databricks/cli/pull/714)).
 * Pin Terraform binary version to 1.5.5 ([#715](https://github.com/databricks/cli/pull/715)).
 * Cleanup after "Add a foundation for built-in templates" ([#707](https://github.com/databricks/cli/pull/707)).
 * Filter down to Python wheel tasks only for trampoline ([#712](https://github.com/databricks/cli/pull/712)).
 * Update Terraform provider schema structs from 1.23.0 ([#713](https://github.com/databricks/cli/pull/713)).

## 0.203.2

CLI:
 * Added `databricks account o-auth-enrollment enable` command ([#687](https://github.com/databricks/cli/pull/687)).

Bundles:
 * Do not try auto detect Python package if no Python wheel tasks defined ([#674](https://github.com/databricks/cli/pull/674)).
 * Renamed `environments` to `targets` in bundle configuration ([#670](https://github.com/databricks/cli/pull/670)).
 * Rename init project-dir flag to output-dir ([#676](https://github.com/databricks/cli/pull/676)).
 * Added support for sync.include and sync.exclude sections ([#671](https://github.com/databricks/cli/pull/671)).
 * Add template directory flag for bundle templates ([#675](https://github.com/databricks/cli/pull/675)).
 * Never ignore root directory when enumerating files in a repository ([#683](https://github.com/databricks/cli/pull/683)).
 * Improve 'mode' error message ([#681](https://github.com/databricks/cli/pull/681)).
 * Added run_as section for bundle configuration ([#692](https://github.com/databricks/cli/pull/692)).

## 0.203.1

CLI:
 * Always resolve .databrickscfg file ([#659](https://github.com/databricks/cli/pull/659)).

Bundles:
 * Add internal tag for bundle fields to be skipped from schema ([#636](https://github.com/databricks/cli/pull/636)).
 * Log the bundle root configuration file if applicable ([#657](https://github.com/databricks/cli/pull/657)).
+ * Execute paths without the .tmpl extension as templates ([#654](https://github.com/databricks/cli/pull/654)). + * Enable environment overrides for job clusters ([#658](https://github.com/databricks/cli/pull/658)). + * Merge artifacts and resources block with overrides enabled ([#660](https://github.com/databricks/cli/pull/660)). + * Locked terraform binary version to <= 1.5.5 ([#666](https://github.com/databricks/cli/pull/666)). + * Return better error messages for invalid JSON schema types in templates ([#661](https://github.com/databricks/cli/pull/661)). + * Use custom prompter for bundle template inputs ([#663](https://github.com/databricks/cli/pull/663)). + * Add map and pair helper functions for bundle templates ([#665](https://github.com/databricks/cli/pull/665)). + * Correct name for force acquire deploy flag ([#656](https://github.com/databricks/cli/pull/656)). + * Confirm that override with a zero value doesn't work ([#669](https://github.com/databricks/cli/pull/669)). + +Internal: + * Consolidate functions in libs/git ([#652](https://github.com/databricks/cli/pull/652)). + * Upgraded Go version to 1.21 ([#664](https://github.com/databricks/cli/pull/664)). + +## 0.203.0 + +CLI: + * Infer host from profile during `auth login` ([#629](https://github.com/databricks/cli/pull/629)). + +Bundles: + * Extend deployment mode support ([#577](https://github.com/databricks/cli/pull/577)). + * Add validation for Git settings in bundles ([#578](https://github.com/databricks/cli/pull/578)). + * Only treat files with .tmpl extension as templates ([#594](https://github.com/databricks/cli/pull/594)). + * Add JSON schema validation for input template parameters ([#598](https://github.com/databricks/cli/pull/598)). + * Add DATABRICKS_BUNDLE_INCLUDE_PATHS to specify include paths through env vars ([#591](https://github.com/databricks/cli/pull/591)). + * Initialise a empty default bundle if BUNDLE_ROOT and DATABRICKS_BUNDLE_INCLUDES env vars are present ([#604](https://github.com/databricks/cli/pull/604)). + * Regenerate bundle resource structs from latest Terraform provider ([#633](https://github.com/databricks/cli/pull/633)). + * Fixed processing jobs libraries with remote path ([#638](https://github.com/databricks/cli/pull/638)). + * Add unit test for file name execution during rendering ([#640](https://github.com/databricks/cli/pull/640)). + * Add bundle init command and support for prompting user for input values ([#631](https://github.com/databricks/cli/pull/631)). + * Fix bundle git branch validation ([#645](https://github.com/databricks/cli/pull/645)). + +Internal: + * Fix mkdir integration test on GCP ([#620](https://github.com/databricks/cli/pull/620)). + * Fix git clone integration test for non-existing repo ([#610](https://github.com/databricks/cli/pull/610)). + * Remove push to main trigger for build workflow ([#621](https://github.com/databricks/cli/pull/621)). + * Remove workflow to publish binaries to S3 ([#622](https://github.com/databricks/cli/pull/622)). + * Fix failing fs mkdir test on azure ([#627](https://github.com/databricks/cli/pull/627)). + * Print y/n options when displaying prompts using cmdio.Ask ([#650](https://github.com/databricks/cli/pull/650)). + +API Changes: + * Changed `databricks account metastore-assignments create` command to not return anything. + * Added `databricks account network-policy` command group. 
+ +OpenAPI commit 7b57ba3a53f4de3d049b6a24391fe5474212daf8 (2023-07-28) + +Dependency updates: + * Bump OpenAPI specification & Go SDK Version ([#624](https://github.com/databricks/cli/pull/624)). + * Bump golang.org/x/term from 0.10.0 to 0.11.0 ([#643](https://github.com/databricks/cli/pull/643)). + * Bump golang.org/x/text from 0.11.0 to 0.12.0 ([#642](https://github.com/databricks/cli/pull/642)). + * Bump golang.org/x/oauth2 from 0.10.0 to 0.11.0 ([#641](https://github.com/databricks/cli/pull/641)). + +## 0.202.0 + +Breaking Change: + * Require include glob patterns to be explicitly defined ([#602](https://github.com/databricks/cli/pull/602)). + +Bundles: + * Add support for more SDK config options ([#587](https://github.com/databricks/cli/pull/587)). + * Add template renderer for Databricks templates ([#589](https://github.com/databricks/cli/pull/589)). + * Fix formatting in renderer.go ([#593](https://github.com/databricks/cli/pull/593)). + * Fixed python wheel test ([#608](https://github.com/databricks/cli/pull/608)). + * Auto detect Python wheel packages and infer build command ([#603](https://github.com/databricks/cli/pull/603)). + * Added support for artifacts building for bundles ([#583](https://github.com/databricks/cli/pull/583)). + * Add support for cloning repositories ([#544](https://github.com/databricks/cli/pull/544)). + * Add regexp compile helper function for templates ([#601](https://github.com/databricks/cli/pull/601)). + * Add unit test that raw strings are printed as is ([#599](https://github.com/databricks/cli/pull/599)). + +Internal: + * Fix tests under ./cmd/configure if DATABRICKS_TOKEN is set ([#605](https://github.com/databricks/cli/pull/605)). + * Remove dependency on global state in generated commands ([#595](https://github.com/databricks/cli/pull/595)). + * Remove dependency on global state for the root command ([#606](https://github.com/databricks/cli/pull/606)). + * Add merge_group trigger for build ([#612](https://github.com/databricks/cli/pull/612)). + * Added support for build command chaining and error on missing wheel ([#607](https://github.com/databricks/cli/pull/607)). + * Add TestAcc prefix to filer test and fix any failing tests ([#611](https://github.com/databricks/cli/pull/611)). + * Add url parse helper function for templates ([#600](https://github.com/databricks/cli/pull/600)). + * Remove dependency on global state for remaining commands ([#613](https://github.com/databricks/cli/pull/613)). + * Update CHANGELOG template ([#588](https://github.com/databricks/cli/pull/588)). + + + +## 0.201.0 + +CLI: + * Support tab completion for profiles ([#572](https://github.com/databricks/cli/pull/572)). + * Improve auth login experience ([#570](https://github.com/databricks/cli/pull/570)). + * Integrate with auto-release infra ([#581](https://github.com/databricks/cli/pull/581)). + +Bundles: + * Add development runs ([#522](https://github.com/databricks/cli/pull/522)). + * Correctly use --profile flag passed for all bundle commands ([#571](https://github.com/databricks/cli/pull/571)). + * Disallow notebooks in paths where files are expected ([#573](https://github.com/databricks/cli/pull/573)). + * Remove base path checks during sync ([#576](https://github.com/databricks/cli/pull/576)). + * First look for databricks.yml before falling back to bundle.yml ([#580](https://github.com/databricks/cli/pull/580)). + +API Changes: + * Removed `databricks metastores maintenance` command. + * Added `databricks metastores enable-optimization` command. 
+ * Added `databricks tables update` command. + * Changed `databricks account settings delete-personal-compute-setting` command with new required argument order. + * Changed `databricks account settings read-personal-compute-setting` command with new required argument order. + * Added `databricks clean-rooms` command group. + +OpenAPI commit 850a075ed9758d21a6bc4409506b48c8b9f93ab4 (2023-07-18) + +Dependency updates: + * Bump golang.org/x/term from 0.9.0 to 0.10.0 ([#567](https://github.com/databricks/cli/pull/567)). + * Bump golang.org/x/oauth2 from 0.9.0 to 0.10.0 ([#566](https://github.com/databricks/cli/pull/566)). + * Bump golang.org/x/mod from 0.11.0 to 0.12.0 ([#568](https://github.com/databricks/cli/pull/568)). + * Bump github.com/databricks/databricks-sdk-go from 0.12.0 to 0.13.0 ([#585](https://github.com/databricks/cli/pull/585)). + ## 0.200.2 CLI: diff --git a/bundle/artifacts/all.go b/bundle/artifacts/all.go index b6a3f7dc6..1a1661e5f 100644 --- a/bundle/artifacts/all.go +++ b/bundle/artifacts/all.go @@ -4,9 +4,10 @@ import ( "context" "fmt" + "slices" + "github.com/databricks/cli/bundle" "golang.org/x/exp/maps" - "golang.org/x/exp/slices" ) // all is an internal proxy for producing a list of mutators for all artifacts. diff --git a/bundle/artifacts/artifacts.go b/bundle/artifacts/artifacts.go new file mode 100644 index 000000000..0331adb70 --- /dev/null +++ b/bundle/artifacts/artifacts.go @@ -0,0 +1,168 @@ +package artifacts + +import ( + "context" + "crypto/sha256" + "encoding/base64" + "errors" + "fmt" + "os" + "path" + "path/filepath" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/artifacts/whl" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/workspace" +) + +type mutatorFactory = func(name string) bundle.Mutator + +var buildMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{ + config.ArtifactPythonWheel: whl.Build, +} + +var uploadMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{} + +func getBuildMutator(t config.ArtifactType, name string) bundle.Mutator { + mutatorFactory, ok := buildMutators[t] + if !ok { + mutatorFactory = BasicBuild + } + + return mutatorFactory(name) +} + +func getUploadMutator(t config.ArtifactType, name string) bundle.Mutator { + mutatorFactory, ok := uploadMutators[t] + if !ok { + mutatorFactory = BasicUpload + } + + return mutatorFactory(name) +} + +// Basic Build defines a general build mutator which builds artifact based on artifact.BuildCommand +type basicBuild struct { + name string +} + +func BasicBuild(name string) bundle.Mutator { + return &basicBuild{name: name} +} + +func (m *basicBuild) Name() string { + return fmt.Sprintf("artifacts.Build(%s)", m.name) +} + +func (m *basicBuild) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact, ok := b.Config.Artifacts[m.name] + if !ok { + return fmt.Errorf("artifact doesn't exist: %s", m.name) + } + + cmdio.LogString(ctx, fmt.Sprintf("artifacts.Build(%s): Building...", m.name)) + + out, err := artifact.Build(ctx) + if err != nil { + return fmt.Errorf("artifacts.Build(%s): %w, output: %s", m.name, err, out) + } + cmdio.LogString(ctx, fmt.Sprintf("artifacts.Build(%s): Build succeeded", m.name)) + + return nil +} + +// Basic Upload defines a general upload mutator which uploads artifact as a library to workspace +type basicUpload struct { + name string +} + +func BasicUpload(name string) 
bundle.Mutator { + return &basicUpload{name: name} +} + +func (m *basicUpload) Name() string { + return fmt.Sprintf("artifacts.Build(%s)", m.name) +} + +func (m *basicUpload) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact, ok := b.Config.Artifacts[m.name] + if !ok { + return fmt.Errorf("artifact doesn't exist: %s", m.name) + } + + if len(artifact.Files) == 0 { + return fmt.Errorf("artifact source is not configured: %s", m.name) + } + + err := uploadArtifact(ctx, artifact, b) + if err != nil { + return fmt.Errorf("artifacts.Upload(%s): %w", m.name, err) + } + + return nil +} + +func uploadArtifact(ctx context.Context, a *config.Artifact, b *bundle.Bundle) error { + for i := range a.Files { + f := &a.Files[i] + if f.NeedsUpload() { + filename := filepath.Base(f.Source) + cmdio.LogString(ctx, fmt.Sprintf("artifacts.Upload(%s): Uploading...", filename)) + remotePath, err := uploadArtifactFile(ctx, f.Source, b) + if err != nil { + return err + } + cmdio.LogString(ctx, fmt.Sprintf("artifacts.Upload(%s): Upload succeeded", filename)) + + f.RemotePath = remotePath + } + } + + a.NormalisePaths() + return nil +} + +// Function to upload artifact file to Workspace +func uploadArtifactFile(ctx context.Context, file string, b *bundle.Bundle) (string, error) { + raw, err := os.ReadFile(file) + if err != nil { + return "", fmt.Errorf("unable to read %s: %w", file, errors.Unwrap(err)) + } + + uploadPath, err := getUploadBasePath(b) + if err != nil { + return "", err + } + + fileHash := sha256.Sum256(raw) + remotePath := path.Join(uploadPath, fmt.Sprintf("%x", fileHash), filepath.Base(file)) + // Make sure target directory exists. + err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, path.Dir(remotePath)) + if err != nil { + return "", fmt.Errorf("unable to create directory for %s: %w", remotePath, err) + } + + // Import to workspace. + err = b.WorkspaceClient().Workspace.Import(ctx, workspace.Import{ + Path: remotePath, + Overwrite: true, + Format: workspace.ImportFormatAuto, + Content: base64.StdEncoding.EncodeToString(raw), + }) + if err != nil { + return "", fmt.Errorf("unable to import %s: %w", remotePath, err) + } + + return remotePath, nil +} + +func getUploadBasePath(b *bundle.Bundle) (string, error) { + artifactPath := b.Config.Workspace.ArtifactsPath + if artifactPath == "" { + return "", fmt.Errorf("remote artifact path not configured") + } + + return path.Join(artifactPath, ".internal"), nil +} diff --git a/bundle/artifacts/artifacts_test.go b/bundle/artifacts/artifacts_test.go new file mode 100644 index 000000000..bbae44efa --- /dev/null +++ b/bundle/artifacts/artifacts_test.go @@ -0,0 +1,123 @@ +package artifacts + +import ( + "context" + "os" + "path/filepath" + "regexp" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/stretchr/testify/require" +) + +func touchEmptyFile(t *testing.T, path string) { + err := os.MkdirAll(filepath.Dir(path), 0700) + require.NoError(t, err) + f, err := os.Create(path) + require.NoError(t, err) + f.Close() +} + +type MockWorkspaceService struct { +} + +// Delete implements workspace.WorkspaceService. +func (MockWorkspaceService) Delete(ctx context.Context, request workspace.Delete) error { + panic("unimplemented") +} + +// Export implements workspace.WorkspaceService. 
+func (MockWorkspaceService) Export(ctx context.Context, request workspace.ExportRequest) (*workspace.ExportResponse, error) { + panic("unimplemented") +} + +// GetStatus implements workspace.WorkspaceService. +func (MockWorkspaceService) GetStatus(ctx context.Context, request workspace.GetStatusRequest) (*workspace.ObjectInfo, error) { + panic("unimplemented") +} + +// Import implements workspace.WorkspaceService. +func (MockWorkspaceService) Import(ctx context.Context, request workspace.Import) error { + return nil +} + +// List implements workspace.WorkspaceService. +func (MockWorkspaceService) List(ctx context.Context, request workspace.ListWorkspaceRequest) (*workspace.ListResponse, error) { + panic("unimplemented") +} + +// Mkdirs implements workspace.WorkspaceService. +func (MockWorkspaceService) Mkdirs(ctx context.Context, request workspace.Mkdirs) error { + return nil +} + +// GetPermissionLevels implements workspace.WorkspaceService. +func (MockWorkspaceService) GetPermissionLevels( + ctx context.Context, + request workspace.GetWorkspaceObjectPermissionLevelsRequest, +) (*workspace.GetWorkspaceObjectPermissionLevelsResponse, error) { + panic("unimplemented") +} + +// GetPermissions implements workspace.WorkspaceService. +func (MockWorkspaceService) GetPermissions( + ctx context.Context, + request workspace.GetWorkspaceObjectPermissionsRequest, +) (*workspace.WorkspaceObjectPermissions, error) { + panic("unimplemented") +} + +// SetPermissions implements workspace.WorkspaceService. +func (MockWorkspaceService) SetPermissions( + ctx context.Context, + request workspace.WorkspaceObjectPermissionsRequest, +) (*workspace.WorkspaceObjectPermissions, error) { + panic("unimplemented") +} + +// UpdatePermissions implements workspace.WorkspaceService. 
+func (MockWorkspaceService) UpdatePermissions( + ctx context.Context, + request workspace.WorkspaceObjectPermissionsRequest, +) (*workspace.WorkspaceObjectPermissions, error) { + panic("unimplemented") +} + +func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) { + dir := t.TempDir() + whlPath := filepath.Join(dir, "dist", "test.whl") + touchEmptyFile(t, whlPath) + b := &bundle.Bundle{ + Config: config.Root{ + Path: dir, + Bundle: config.Bundle{ + Target: "whatever", + }, + Workspace: config.Workspace{ + ArtifactsPath: "/Users/test@databricks.com/whatever", + }, + }, + } + + b.WorkspaceClient().Workspace.WithImpl(MockWorkspaceService{}) + artifact := &config.Artifact{ + Type: "whl", + Files: []config.ArtifactFile{ + { + Source: whlPath, + Libraries: []*compute.Library{ + {Whl: "dist\\test.whl"}, + }, + }, + }, + } + + err := uploadArtifact(context.Background(), artifact, b) + require.NoError(t, err) + require.Regexp(t, regexp.MustCompile("/Users/test@databricks.com/whatever/.internal/[a-z0-9]+/test.whl"), artifact.Files[0].RemotePath) + require.Regexp(t, regexp.MustCompile("/Workspace/Users/test@databricks.com/whatever/.internal/[a-z0-9]+/test.whl"), artifact.Files[0].Libraries[0].Whl) +} diff --git a/bundle/artifacts/autodetect.go b/bundle/artifacts/autodetect.go new file mode 100644 index 000000000..6e80ef0b6 --- /dev/null +++ b/bundle/artifacts/autodetect.go @@ -0,0 +1,33 @@ +package artifacts + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/artifacts/whl" + "github.com/databricks/cli/libs/log" +) + +func DetectPackages() bundle.Mutator { + return &autodetect{} +} + +type autodetect struct { +} + +func (m *autodetect) Name() string { + return "artifacts.DetectPackages" +} + +func (m *autodetect) Apply(ctx context.Context, b *bundle.Bundle) error { + // If artifacts section explicitly defined, do not try to auto detect packages + if b.Config.Artifacts != nil { + log.Debugf(ctx, "artifacts block is defined, skipping auto-detecting") + return nil + } + + return bundle.Apply(ctx, b, bundle.Seq( + whl.DetectPackage(), + whl.DefineArtifactsFromLibraries(), + )) +} diff --git a/bundle/artifacts/build.go b/bundle/artifacts/build.go index 294351f41..6b1aac822 100644 --- a/bundle/artifacts/build.go +++ b/bundle/artifacts/build.go @@ -3,9 +3,9 @@ package artifacts import ( "context" "fmt" + "path/filepath" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/artifacts/notebook" ) func BuildAll() bundle.Mutator { @@ -33,9 +33,24 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("artifact doesn't exist: %s", m.name) } - if artifact.Notebook != nil { - return bundle.Apply(ctx, b, notebook.Build(m.name)) + // Skip building if build command is not specified or infered + if artifact.BuildCommand == "" { + // If no build command was specified or infered and there is no + // artifact output files specified, artifact is misconfigured + if len(artifact.Files) == 0 { + return fmt.Errorf("misconfigured artifact: please specify 'build' or 'files' property") + } + return nil } - return nil + // If artifact path is not provided, use bundle root dir + if artifact.Path == "" { + artifact.Path = b.Config.Path + } + + if !filepath.IsAbs(artifact.Path) { + artifact.Path = filepath.Join(b.Config.Path, artifact.Path) + } + + return bundle.Apply(ctx, b, getBuildMutator(artifact.Type, m.name)) } diff --git a/bundle/artifacts/infer.go b/bundle/artifacts/infer.go new file mode 100644 index 
000000000..ade5def51 --- /dev/null +++ b/bundle/artifacts/infer.go @@ -0,0 +1,64 @@ +package artifacts + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/artifacts/whl" + "github.com/databricks/cli/bundle/config" +) + +var inferMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{ + config.ArtifactPythonWheel: whl.InferBuildCommand, +} + +func getInferMutator(t config.ArtifactType, name string) bundle.Mutator { + mutatorFactory, ok := inferMutators[t] + if !ok { + return nil + } + + return mutatorFactory(name) +} + +func InferMissingProperties() bundle.Mutator { + return &all{ + name: "infer", + fn: inferArtifactByName, + } +} + +func inferArtifactByName(name string) (bundle.Mutator, error) { + return &infer{name}, nil +} + +type infer struct { + name string +} + +func (m *infer) Name() string { + return fmt.Sprintf("artifacts.Infer(%s)", m.name) +} + +func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact, ok := b.Config.Artifacts[m.name] + if !ok { + return fmt.Errorf("artifact doesn't exist: %s", m.name) + } + + // only try to infer command if it's not already defined + // and there is no explicitly files defined which means + // that the package is built outside of bundle cycles + // manually by customer + if artifact.BuildCommand != "" || len(artifact.Files) > 0 { + return nil + } + + inferMutator := getInferMutator(artifact.Type, m.name) + if inferMutator != nil { + return bundle.Apply(ctx, b, inferMutator) + } + + return nil +} diff --git a/bundle/artifacts/notebook/build.go b/bundle/artifacts/notebook/build.go deleted file mode 100644 index 4a25868a9..000000000 --- a/bundle/artifacts/notebook/build.go +++ /dev/null @@ -1,81 +0,0 @@ -package notebook - -import ( - "context" - "errors" - "fmt" - "os" - "path" - "path/filepath" - "strings" - - "github.com/databricks/cli/bundle" - "github.com/databricks/databricks-sdk-go/service/workspace" -) - -type build struct { - name string -} - -func Build(name string) bundle.Mutator { - return &build{ - name: name, - } -} - -func (m *build) Name() string { - return fmt.Sprintf("notebook.Build(%s)", m.name) -} - -func (m *build) Apply(_ context.Context, b *bundle.Bundle) error { - a, ok := b.Config.Artifacts[m.name] - if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) - } - - artifact := a.Notebook - - // Check if the filetype is supported. - switch ext := strings.ToLower(filepath.Ext(artifact.Path)); ext { - case ".py": - artifact.Language = workspace.LanguagePython - case ".scala": - artifact.Language = workspace.LanguageScala - case ".sql": - artifact.Language = workspace.LanguageSql - default: - return fmt.Errorf("invalid notebook extension: %s", ext) - } - - // Open underlying file. - f, err := os.Open(filepath.Join(b.Config.Path, artifact.Path)) - if err != nil { - return fmt.Errorf("unable to open artifact file %s: %w", artifact.Path, errors.Unwrap(err)) - } - defer f.Close() - - // Check that the file contains the notebook marker on its first line. - ok, err = hasMarker(artifact.Language, f) - if err != nil { - return fmt.Errorf("unable to read artifact file %s: %s", artifact.Path, errors.Unwrap(err)) - } - if !ok { - return fmt.Errorf("notebook marker not found in %s", artifact.Path) - } - - // Check that an artifact path is defined. - remotePath := b.Config.Workspace.ArtifactsPath - if remotePath == "" { - return fmt.Errorf("remote artifact path not configured") - } - - // Store absolute paths. 
- artifact.LocalPath = filepath.Join(b.Config.Path, artifact.Path) - artifact.RemotePath = path.Join(remotePath, stripExtension(artifact.Path)) - return nil -} - -func stripExtension(path string) string { - ext := filepath.Ext(path) - return path[0 : len(path)-len(ext)] -} diff --git a/bundle/artifacts/notebook/marker.go b/bundle/artifacts/notebook/marker.go deleted file mode 100644 index a04ca9895..000000000 --- a/bundle/artifacts/notebook/marker.go +++ /dev/null @@ -1,29 +0,0 @@ -package notebook - -import ( - "bufio" - "io" - "strings" - - "github.com/databricks/databricks-sdk-go/service/workspace" -) - -func hasMarker(l workspace.Language, r io.Reader) (bool, error) { - scanner := bufio.NewScanner(r) - ok := scanner.Scan() - if !ok { - return false, scanner.Err() - } - - line := strings.TrimSpace(scanner.Text()) - switch l { - case workspace.LanguagePython: - return line == "# Databricks notebook source", nil - case workspace.LanguageScala: - return line == "// Databricks notebook source", nil - case workspace.LanguageSql: - return line == "-- Databricks notebook source", nil - default: - panic("language not handled: " + l) - } -} diff --git a/bundle/artifacts/notebook/upload.go b/bundle/artifacts/notebook/upload.go deleted file mode 100644 index 38ac9d615..000000000 --- a/bundle/artifacts/notebook/upload.go +++ /dev/null @@ -1,60 +0,0 @@ -package notebook - -import ( - "context" - "encoding/base64" - "errors" - "fmt" - "os" - "path" - - "github.com/databricks/cli/bundle" - "github.com/databricks/databricks-sdk-go/service/workspace" -) - -type upload struct { - name string -} - -func Upload(name string) bundle.Mutator { - return &upload{ - name: name, - } -} - -func (m *upload) Name() string { - return fmt.Sprintf("notebook.Upload(%s)", m.name) -} - -func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { - a, ok := b.Config.Artifacts[m.name] - if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) - } - - artifact := a.Notebook - raw, err := os.ReadFile(artifact.LocalPath) - if err != nil { - return fmt.Errorf("unable to read %s: %w", m.name, errors.Unwrap(err)) - } - - // Make sure target directory exists. - err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, path.Dir(artifact.RemotePath)) - if err != nil { - return fmt.Errorf("unable to create directory for %s: %w", m.name, err) - } - - // Import to workspace. 
- err = b.WorkspaceClient().Workspace.Import(ctx, workspace.Import{ - Path: artifact.RemotePath, - Overwrite: true, - Format: workspace.ImportFormatSource, - Language: artifact.Language, - Content: base64.StdEncoding.EncodeToString(raw), - }) - if err != nil { - return fmt.Errorf("unable to import %s: %w", m.name, err) - } - - return nil -} diff --git a/bundle/artifacts/upload.go b/bundle/artifacts/upload.go index f5ce2b23e..990718aa4 100644 --- a/bundle/artifacts/upload.go +++ b/bundle/artifacts/upload.go @@ -5,7 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/artifacts/notebook" + "github.com/databricks/databricks-sdk-go/service/workspace" ) func UploadAll() bundle.Mutator { @@ -15,6 +15,10 @@ func UploadAll() bundle.Mutator { } } +func CleanUp() bundle.Mutator { + return &cleanUp{} +} + type upload struct { name string } @@ -33,8 +37,33 @@ func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("artifact doesn't exist: %s", m.name) } - if artifact.Notebook != nil { - return bundle.Apply(ctx, b, notebook.Upload(m.name)) + if len(artifact.Files) == 0 { + return fmt.Errorf("artifact source is not configured: %s", m.name) + } + + return bundle.Apply(ctx, b, getUploadMutator(artifact.Type, m.name)) +} + +type cleanUp struct{} + +func (m *cleanUp) Name() string { + return "artifacts.CleanUp" +} + +func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) error { + uploadPath, err := getUploadBasePath(b) + if err != nil { + return err + } + + b.WorkspaceClient().Workspace.Delete(ctx, workspace.Delete{ + Path: uploadPath, + Recursive: true, + }) + + err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, uploadPath) + if err != nil { + return fmt.Errorf("unable to create directory for %s: %w", uploadPath, err) } return nil diff --git a/bundle/artifacts/whl/autodetect.go b/bundle/artifacts/whl/autodetect.go new file mode 100644 index 000000000..29031e86d --- /dev/null +++ b/bundle/artifacts/whl/autodetect.go @@ -0,0 +1,81 @@ +package whl + +import ( + "context" + "fmt" + "os" + "path/filepath" + "regexp" + "time" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/log" +) + +type detectPkg struct { +} + +func DetectPackage() bundle.Mutator { + return &detectPkg{} +} + +func (m *detectPkg) Name() string { + return "artifacts.whl.AutoDetect" +} + +func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error { + wheelTasks := libraries.FindAllWheelTasksWithLocalLibraries(b) + if len(wheelTasks) == 0 { + log.Infof(ctx, "No local wheel tasks in databricks.yml config, skipping auto detect") + return nil + } + cmdio.LogString(ctx, "artifacts.whl.AutoDetect: Detecting Python wheel project...") + + // checking if there is setup.py in the bundle root + setupPy := filepath.Join(b.Config.Path, "setup.py") + _, err := os.Stat(setupPy) + if err != nil { + cmdio.LogString(ctx, "artifacts.whl.AutoDetect: No Python wheel project found at bundle root folder") + return nil + } + + cmdio.LogString(ctx, fmt.Sprintf("artifacts.whl.AutoDetect: Found Python wheel project at %s", b.Config.Path)) + module := extractModuleName(setupPy) + + if b.Config.Artifacts == nil { + b.Config.Artifacts = make(map[string]*config.Artifact) + } + + pkgPath, err := filepath.Abs(b.Config.Path) + if err != nil { + return err + } + b.Config.Artifacts[module] = &config.Artifact{ + Path: pkgPath, + 
Type: config.ArtifactPythonWheel, + } + + return nil +} + +func extractModuleName(setupPy string) string { + bytes, err := os.ReadFile(setupPy) + if err != nil { + return randomName() + } + + content := string(bytes) + r := regexp.MustCompile(`name=['"](.*)['"]`) + matches := r.FindStringSubmatch(content) + if len(matches) == 0 { + return randomName() + } + return matches[1] +} + +func randomName() string { + return fmt.Sprintf("artifact%d", time.Now().Unix()) +} diff --git a/bundle/artifacts/whl/autodetect_test.go b/bundle/artifacts/whl/autodetect_test.go new file mode 100644 index 000000000..b53289b2a --- /dev/null +++ b/bundle/artifacts/whl/autodetect_test.go @@ -0,0 +1,22 @@ +package whl + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExtractModuleName(t *testing.T) { + moduleName := extractModuleName("./testdata/setup.py") + assert.Equal(t, "my_test_code", moduleName) +} + +func TestExtractModuleNameMinimal(t *testing.T) { + moduleName := extractModuleName("./testdata/setup_minimal.py") + assert.Equal(t, "my_test_code", moduleName) +} + +func TestExtractModuleNameIncorrect(t *testing.T) { + moduleName := extractModuleName("./testdata/setup_incorrect.py") + assert.Contains(t, moduleName, "artifact") +} diff --git a/bundle/artifacts/whl/build.go b/bundle/artifacts/whl/build.go new file mode 100644 index 000000000..4565a4c80 --- /dev/null +++ b/bundle/artifacts/whl/build.go @@ -0,0 +1,60 @@ +package whl + +import ( + "context" + "fmt" + "os" + "path/filepath" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/python" +) + +type build struct { + name string +} + +func Build(name string) bundle.Mutator { + return &build{ + name: name, + } +} + +func (m *build) Name() string { + return fmt.Sprintf("artifacts.whl.Build(%s)", m.name) +} + +func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact, ok := b.Config.Artifacts[m.name] + if !ok { + return fmt.Errorf("artifact doesn't exist: %s", m.name) + } + + cmdio.LogString(ctx, fmt.Sprintf("artifacts.whl.Build(%s): Building...", m.name)) + + dir := artifact.Path + + distPath := filepath.Join(dir, "dist") + os.RemoveAll(distPath) + python.CleanupWheelFolder(dir) + + out, err := artifact.Build(ctx) + if err != nil { + return fmt.Errorf("artifacts.whl.Build(%s): Failed %w, output: %s", m.name, err, out) + } + cmdio.LogString(ctx, fmt.Sprintf("artifacts.whl.Build(%s): Build succeeded", m.name)) + + wheels := python.FindFilesWithSuffixInPath(distPath, ".whl") + if len(wheels) == 0 { + return fmt.Errorf("artifacts.whl.Build(%s): cannot find built wheel in %s", m.name, dir) + } + for _, wheel := range wheels { + artifact.Files = append(artifact.Files, config.ArtifactFile{ + Source: wheel, + }) + } + + return nil +} diff --git a/bundle/artifacts/whl/from_libraries.go b/bundle/artifacts/whl/from_libraries.go new file mode 100644 index 000000000..9d35f6314 --- /dev/null +++ b/bundle/artifacts/whl/from_libraries.go @@ -0,0 +1,56 @@ +package whl + +import ( + "context" + "path/filepath" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/libs/log" +) + +type fromLibraries struct{} + +func DefineArtifactsFromLibraries() bundle.Mutator { + return &fromLibraries{} +} + +func (m *fromLibraries) Name() string { + return "artifacts.whl.DefineArtifactsFromLibraries" +} + +func (*fromLibraries) Apply(ctx 
context.Context, b *bundle.Bundle) error { + if len(b.Config.Artifacts) != 0 { + log.Debugf(ctx, "Skipping defining artifacts from libraries because artifacts section is explicitly defined") + return nil + } + + tasks := libraries.FindAllWheelTasksWithLocalLibraries(b) + for _, task := range tasks { + for _, lib := range task.Libraries { + matches, err := filepath.Glob(filepath.Join(b.Config.Path, lib.Whl)) + // File referenced from libraries section does not exists, skipping + if err != nil { + continue + } + + for _, match := range matches { + name := filepath.Base(match) + if b.Config.Artifacts == nil { + b.Config.Artifacts = make(map[string]*config.Artifact) + } + + log.Debugf(ctx, "Adding an artifact block for %s", match) + b.Config.Artifacts[name] = &config.Artifact{ + Files: []config.ArtifactFile{ + {Source: match}, + }, + Type: config.ArtifactPythonWheel, + } + } + } + } + + return nil +} diff --git a/bundle/artifacts/whl/infer.go b/bundle/artifacts/whl/infer.go new file mode 100644 index 000000000..518d926ca --- /dev/null +++ b/bundle/artifacts/whl/infer.go @@ -0,0 +1,34 @@ +package whl + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/python" +) + +type infer struct { + name string +} + +func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact := b.Config.Artifacts[m.name] + py, err := python.DetectExecutable(ctx) + if err != nil { + return err + } + artifact.BuildCommand = fmt.Sprintf("%s setup.py bdist_wheel", py) + + return nil +} + +func (m *infer) Name() string { + return fmt.Sprintf("artifacts.whl.Infer(%s)", m.name) +} + +func InferBuildCommand(name string) bundle.Mutator { + return &infer{ + name: name, + } +} diff --git a/bundle/artifacts/whl/testdata/setup.py b/bundle/artifacts/whl/testdata/setup.py new file mode 100644 index 000000000..7a1317b2f --- /dev/null +++ b/bundle/artifacts/whl/testdata/setup.py @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import my_test_code + +setup( + name="my_test_code", + version=my_test_code.__version__, + author=my_test_code.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my test wheel", + packages=find_packages(include=["my_test_code"]), + entry_points={"group_1": "run=my_test_code.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/bundle/artifacts/whl/testdata/setup_incorrect.py b/bundle/artifacts/whl/testdata/setup_incorrect.py new file mode 100644 index 000000000..c6aa17b2d --- /dev/null +++ b/bundle/artifacts/whl/testdata/setup_incorrect.py @@ -0,0 +1,14 @@ +from setuptools import setup, find_packages + +import my_test_code + +setup( + version=my_test_code.__version__, + author=my_test_code.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my test wheel", + packages=find_packages(include=["my_test_code"]), + entry_points={"group_1": "run=my_test_code.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/bundle/artifacts/whl/testdata/setup_minimal.py b/bundle/artifacts/whl/testdata/setup_minimal.py new file mode 100644 index 000000000..3e81e7217 --- /dev/null +++ b/bundle/artifacts/whl/testdata/setup_minimal.py @@ -0,0 +1,3 @@ +from setuptools import setup + +setup(name="my_test_code") diff --git a/bundle/bundle.go b/bundle/bundle.go index 02d0eaac9..61bf1ffe4 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -7,21 +7,26 @@ package bundle import ( + "context" "fmt" "os" "path/filepath" "sync" 
"github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/env" "github.com/databricks/cli/folders" "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/locker" + "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/terraform" "github.com/databricks/databricks-sdk-go" sdkconfig "github.com/databricks/databricks-sdk-go/config" "github.com/hashicorp/terraform-exec/tfexec" ) +const internalFolder = ".internal" + type Bundle struct { Config config.Root @@ -43,9 +48,30 @@ type Bundle struct { AutoApprove bool } -func Load(path string) (*Bundle, error) { +func Load(ctx context.Context, path string) (*Bundle, error) { bundle := &Bundle{} - err := bundle.Config.Load(filepath.Join(path, config.FileName)) + stat, err := os.Stat(path) + if err != nil { + return nil, err + } + configFile, err := config.FileNames.FindInPath(path) + if err != nil { + _, hasRootEnv := env.Root(ctx) + _, hasIncludesEnv := env.Includes(ctx) + if hasRootEnv && hasIncludesEnv && stat.IsDir() { + log.Debugf(ctx, "No bundle configuration; using bundle root: %s", path) + bundle.Config = config.Root{ + Path: path, + Bundle: config.Bundle{ + Name: filepath.Base(path), + }, + } + return bundle, nil + } + return nil, err + } + log.Debugf(ctx, "Loading bundle configuration from: %s", configFile) + err = bundle.Config.Load(configFile) if err != nil { return nil, err } @@ -54,20 +80,20 @@ func Load(path string) (*Bundle, error) { // MustLoad returns a bundle configuration. // It returns an error if a bundle was not found or could not be loaded. -func MustLoad() (*Bundle, error) { - root, err := mustGetRoot() +func MustLoad(ctx context.Context) (*Bundle, error) { + root, err := mustGetRoot(ctx) if err != nil { return nil, err } - return Load(root) + return Load(ctx, root) } // TryLoad returns a bundle configuration if there is one, but doesn't fail if there isn't one. // It returns an error if a bundle was found but could not be loaded. // It returns a `nil` bundle if a bundle was not found. -func TryLoad() (*Bundle, error) { - root, err := tryGetRoot() +func TryLoad(ctx context.Context) (*Bundle, error) { + root, err := tryGetRoot(ctx) if err != nil { return nil, err } @@ -77,7 +103,7 @@ func TryLoad() (*Bundle, error) { return nil, nil } - return Load(root) + return Load(ctx, root) } func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient { @@ -92,14 +118,13 @@ func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient { } // CacheDir returns directory to use for temporary files for this bundle. -// Scoped to the bundle's environment. -func (b *Bundle) CacheDir(paths ...string) (string, error) { - if b.Config.Bundle.Environment == "" { - panic("environment not set") +// Scoped to the bundle's target. +func (b *Bundle) CacheDir(ctx context.Context, paths ...string) (string, error) { + if b.Config.Bundle.Target == "" { + panic("target not set") } - cacheDirName, exists := os.LookupEnv("DATABRICKS_BUNDLE_TMP") - + cacheDirName, exists := env.TempDir(ctx) if !exists || cacheDirName == "" { cacheDirName = filepath.Join( // Anchor at bundle root directory. @@ -113,8 +138,8 @@ func (b *Bundle) CacheDir(paths ...string) (string, error) { // Fixed components of the result path. parts := []string{ cacheDirName, - // Scope with environment name. - b.Config.Bundle.Environment, + // Scope with target name. + b.Config.Bundle.Target, } // Append dynamic components of the result path. 
@@ -130,6 +155,38 @@ func (b *Bundle) CacheDir(paths ...string) (string, error) { return dir, nil } +// This directory is used to store and automatically sync internal bundle files, such as +// notebook trampoline files for Python wheels. +func (b *Bundle) InternalDir(ctx context.Context) (string, error) { + cacheDir, err := b.CacheDir(ctx) + if err != nil { + return "", err + } + + dir := filepath.Join(cacheDir, internalFolder) + err = os.MkdirAll(dir, 0700) + if err != nil { + return dir, err + } + + return dir, nil +} + +// GetSyncIncludePatterns returns the list of user-defined includes +// and also adds the InternalDir folder to the include list for the sync command +// so that this folder is always synced. +func (b *Bundle) GetSyncIncludePatterns(ctx context.Context) ([]string, error) { + internalDir, err := b.InternalDir(ctx) + if err != nil { + return nil, err + } + internalDirRel, err := filepath.Rel(b.Config.Path, internalDir) + if err != nil { + return nil, err + } + return append(b.Config.Sync.Include, filepath.ToSlash(filepath.Join(internalDirRel, "*.*"))), nil +} + func (b *Bundle) GitRepository() (*git.Repository, error) { rootPath, err := folders.FindDirWithLeaf(b.Config.Path, ".git") if err != nil { diff --git a/bundle/bundle_test.go b/bundle/bundle_test.go index 5a26d3508..43477efd1 100644 --- a/bundle/bundle_test.go +++ b/bundle/bundle_test.go @@ -1,108 +1,112 @@ package bundle import ( + "context" "os" "path/filepath" "testing" + "github.com/databricks/cli/bundle/env" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestLoadNotExists(t *testing.T) { - b, err := Load("/doesntexist") + b, err := Load(context.Background(), "/doesntexist") assert.True(t, os.IsNotExist(err)) assert.Nil(t, b) } func TestLoadExists(t *testing.T) { - b, err := Load("./tests/basic") + b, err := Load(context.Background(), "./tests/basic") require.Nil(t, err) assert.Equal(t, "basic", b.Config.Bundle.Name) } func TestBundleCacheDir(t *testing.T) { + ctx := context.Background() projectDir := t.TempDir() - f1, err := os.Create(filepath.Join(projectDir, "bundle.yml")) + f1, err := os.Create(filepath.Join(projectDir, "databricks.yml")) require.NoError(t, err) f1.Close() - bundle, err := Load(projectDir) + bundle, err := Load(ctx, projectDir) require.NoError(t, err) - // Artificially set environment. - // This is otherwise done by [mutators.SelectEnvironment]. - bundle.Config.Bundle.Environment = "default" + // Artificially set target. + // This is otherwise done by [mutators.SelectTarget]. + bundle.Config.Bundle.Target = "default" // unset env variable in case it's set t.Setenv("DATABRICKS_BUNDLE_TMP", "") - cacheDir, err := bundle.CacheDir() + cacheDir, err := bundle.CacheDir(ctx) - // format is /.databricks/bundle/ + // format is /.databricks/bundle/ assert.NoError(t, err) assert.Equal(t, filepath.Join(projectDir, ".databricks", "bundle", "default"), cacheDir) } func TestBundleCacheDirOverride(t *testing.T) { + ctx := context.Background() projectDir := t.TempDir() bundleTmpDir := t.TempDir() - f1, err := os.Create(filepath.Join(projectDir, "bundle.yml")) + f1, err := os.Create(filepath.Join(projectDir, "databricks.yml")) require.NoError(t, err) f1.Close() - bundle, err := Load(projectDir) + bundle, err := Load(ctx, projectDir) require.NoError(t, err) - // Artificially set environment. - // This is otherwise done by [mutators.SelectEnvironment]. - bundle.Config.Bundle.Environment = "default" + // Artificially set target. + // This is otherwise done by [mutators.SelectTarget].
+ bundle.Config.Bundle.Target = "default" // now we expect to use 'bundleTmpDir' instead of CWD/.databricks/bundle t.Setenv("DATABRICKS_BUNDLE_TMP", bundleTmpDir) - cacheDir, err := bundle.CacheDir() + cacheDir, err := bundle.CacheDir(ctx) - // format is / + // format is / assert.NoError(t, err) assert.Equal(t, filepath.Join(bundleTmpDir, "default"), cacheDir) } func TestBundleMustLoadSuccess(t *testing.T) { - t.Setenv(envBundleRoot, "./tests/basic") - b, err := MustLoad() + t.Setenv(env.RootVariable, "./tests/basic") + b, err := MustLoad(context.Background()) require.NoError(t, err) assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path)) } func TestBundleMustLoadFailureWithEnv(t *testing.T) { - t.Setenv(envBundleRoot, "./tests/doesntexist") - _, err := MustLoad() + t.Setenv(env.RootVariable, "./tests/doesntexist") + _, err := MustLoad(context.Background()) require.Error(t, err, "not a directory") } func TestBundleMustLoadFailureIfNotFound(t *testing.T) { chdir(t, t.TempDir()) - _, err := MustLoad() + _, err := MustLoad(context.Background()) require.Error(t, err, "unable to find bundle root") } func TestBundleTryLoadSuccess(t *testing.T) { - t.Setenv(envBundleRoot, "./tests/basic") - b, err := TryLoad() + t.Setenv(env.RootVariable, "./tests/basic") + b, err := TryLoad(context.Background()) require.NoError(t, err) assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path)) } func TestBundleTryLoadFailureWithEnv(t *testing.T) { - t.Setenv(envBundleRoot, "./tests/doesntexist") - _, err := TryLoad() + t.Setenv(env.RootVariable, "./tests/doesntexist") + _, err := TryLoad(context.Background()) require.Error(t, err, "not a directory") } func TestBundleTryLoadOkIfNotFound(t *testing.T) { chdir(t, t.TempDir()) - b, err := TryLoad() + b, err := TryLoad(context.Background()) assert.NoError(t, err) assert.Nil(t, b) } diff --git a/bundle/config/artifact.go b/bundle/config/artifact.go index f782fcfcd..d7048a02e 100644 --- a/bundle/config/artifact.go +++ b/bundle/config/artifact.go @@ -1,20 +1,101 @@ package config -import "github.com/databricks/databricks-sdk-go/service/workspace" +import ( + "bytes" + "context" + "fmt" + "os/exec" + "path" + "strings" + + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/compute" +) + +type Artifacts map[string]*Artifact + +func (artifacts Artifacts) SetConfigFilePath(path string) { + for _, artifact := range artifacts { + artifact.ConfigFilePath = path + } +} + +type ArtifactType string + +const ArtifactPythonWheel ArtifactType = `whl` + +type ArtifactFile struct { + Source string `json:"source"` + RemotePath string `json:"-" bundle:"readonly"` + Libraries []*compute.Library `json:"-" bundle:"readonly"` +} // Artifact defines a single local code artifact that can be // built/uploaded/referenced in the context of this bundle. type Artifact struct { - Notebook *NotebookArtifact `json:"notebook,omitempty"` -} + Type ArtifactType `json:"type"` -type NotebookArtifact struct { + // The local path to the directory with a root of artifact, + // for example, where setup.py is for Python projects Path string `json:"path"` - // Language is detected during build step. - Language workspace.Language `json:"language,omitempty" bundle:"readonly"` + // The relative or absolute path to the built artifact files + // (Python wheel, Java jar and etc) itself + Files []ArtifactFile `json:"files"` + BuildCommand string `json:"build"` - // Paths are synthesized during build step. 
- LocalPath string `json:"local_path,omitempty" bundle:"readonly"` - RemotePath string `json:"remote_path,omitempty" bundle:"readonly"` + paths.Paths +} + +func (a *Artifact) Build(ctx context.Context) ([]byte, error) { + if a.BuildCommand == "" { + return nil, fmt.Errorf("no build property defined") + } + + out := make([][]byte, 0) + commands := strings.Split(a.BuildCommand, " && ") + for _, command := range commands { + buildParts := strings.Split(command, " ") + cmd := exec.CommandContext(ctx, buildParts[0], buildParts[1:]...) + cmd.Dir = a.Path + res, err := cmd.CombinedOutput() + if err != nil { + return res, err + } + out = append(out, res) + } + return bytes.Join(out, []byte{}), nil +} + +func (a *Artifact) NormalisePaths() { + for _, f := range a.Files { + // If no libraries are attached, there is nothing to normalise; skip. + if f.Libraries == nil { + continue + } + + wsfsBase := "/Workspace" + remotePath := path.Join(wsfsBase, f.RemotePath) + for i := range f.Libraries { + lib := f.Libraries[i] + if lib.Whl != "" { + lib.Whl = remotePath + continue + } + if lib.Jar != "" { + lib.Jar = remotePath + continue + } + } + + } +} + +// This function determines if artifact files need to be uploaded. +// During the bundle processing we analyse which library uses which artifact file. +// If an artifact file is used as a library, we store a reference to this library in the artifact file's Libraries field. +// If an artifact file has libraries it's been used in, it means that we need to upload this file. +// Otherwise this artifact file is not used and we skip uploading it. +func (af *ArtifactFile) NeedsUpload() bool { + return af.Libraries != nil } diff --git a/bundle/config/bundle.go b/bundle/config/bundle.go index cf3864775..d444f5077 100644 --- a/bundle/config/bundle.go +++ b/bundle/config/bundle.go @@ -15,7 +15,10 @@ type Bundle struct { // Default warehouse to run SQL on. // DefaultWarehouse string `json:"default_warehouse,omitempty"` - // Environment is set by the mutator that selects the environment. + // Target is set by the mutator that selects the target. + Target string `json:"target,omitempty" bundle:"readonly"` + + // DEPRECATED. Left for backward compatibility with Target Environment string `json:"environment,omitempty" bundle:"readonly"` // Terraform holds configuration related to Terraform. @@ -25,14 +28,17 @@ type Bundle struct { // Lock configures locking behavior on deployment. Lock Lock `json:"lock" bundle:"readonly"` + // Force-override Git branch validation. + Force bool `json:"force" bundle:"readonly"` + // Contains Git information like current commit, current branch and // origin url. Automatically loaded by reading .git directory if not specified Git Git `json:"git,omitempty"` - // Determines the mode of the environment. + // Determines the mode of the target. // For example, 'mode: development' can be used for deployments for // development purposes. - // Annotated readonly as this should be set at the environment level. + // Annotated readonly as this should be set at the target level. Mode Mode `json:"mode,omitempty" bundle:"readonly"` // Overrides the compute used for jobs and other supported assets. diff --git a/bundle/config/environment.go b/bundle/config/environment.go deleted file mode 100644 index 06a8d8909..000000000 --- a/bundle/config/environment.go +++ /dev/null @@ -1,38 +0,0 @@ -package config - -type Mode string - -// Environment defines overrides for a single environment. -// This structure is recursively merged into the root configuration.
-type Environment struct { - // Default marks that this environment must be used if one isn't specified - // by the user (through environment variable or command line argument). - Default bool `json:"default,omitempty"` - - // Determines the mode of the environment. - // For example, 'mode: development' can be used for deployments for - // development purposes. - Mode Mode `json:"mode,omitempty"` - - // Overrides the compute used for jobs and other supported assets. - ComputeID string `json:"compute_id,omitempty"` - - Bundle *Bundle `json:"bundle,omitempty"` - - Workspace *Workspace `json:"workspace,omitempty"` - - Artifacts map[string]*Artifact `json:"artifacts,omitempty"` - - Resources *Resources `json:"resources,omitempty"` - - // Override default values for defined variables - // Does not permit defining new variables or redefining existing ones - // in the scope of an environment - Variables map[string]string `json:"variables,omitempty"` -} - -const ( - // Right now, we just have a default / "" mode and a "development" mode. - // Additional modes are expected to come for pull-requests and production. - Development Mode = "development" -) diff --git a/bundle/config/experimental.go b/bundle/config/experimental.go new file mode 100644 index 000000000..be0e7d8fe --- /dev/null +++ b/bundle/config/experimental.go @@ -0,0 +1,18 @@ +package config + +type Experimental struct { + Scripts map[ScriptHook]Command `json:"scripts,omitempty"` +} + +type Command string +type ScriptHook string + +// These hook names are subject to change and currently experimental +const ( + ScriptPreInit ScriptHook = "preinit" + ScriptPostInit ScriptHook = "postinit" + ScriptPreBuild ScriptHook = "prebuild" + ScriptPostBuild ScriptHook = "postbuild" + ScriptPreDeploy ScriptHook = "predeploy" + ScriptPostDeploy ScriptHook = "postdeploy" +) diff --git a/bundle/config/git.go b/bundle/config/git.go index 7ada8dfbc..760134a86 100644 --- a/bundle/config/git.go +++ b/bundle/config/git.go @@ -4,4 +4,10 @@ type Git struct { Branch string `json:"branch,omitempty"` OriginURL string `json:"origin_url,omitempty"` Commit string `json:"commit,omitempty" bundle:"readonly"` + + // Inferred is set to true if the Git details were inferred and weren't set explicitly + Inferred bool `json:"-" bundle:"readonly"` + + // The actual branch according to Git (may be different from the configured branch) + ActualBranch string `json:"-" bundle:"readonly"` } diff --git a/bundle/config/interpolation/interpolation.go b/bundle/config/interpolation/interpolation.go index bf19804a0..8ba0b8b1f 100644 --- a/bundle/config/interpolation/interpolation.go +++ b/bundle/config/interpolation/interpolation.go @@ -9,10 +9,11 @@ import ( "sort" "strings" + "slices" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/variable" "golang.org/x/exp/maps" - "golang.org/x/exp/slices" ) const Delimiter = "." 
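The new Experimental block above maps lifecycle hook names to shell commands. A minimal sketch of how such a mapping can be assembled with the types introduced here (the command strings are illustrative placeholders, not part of the change):

package main

import (
	"fmt"

	"github.com/databricks/cli/bundle/config"
)

func main() {
	// Hook names come from the constants declared in experimental.go;
	// the commands are arbitrary shell one-liners supplied by the user.
	exp := config.Experimental{
		Scripts: map[config.ScriptHook]config.Command{
			config.ScriptPreInit:   "echo preinit",
			config.ScriptPostBuild: "echo postbuild",
		},
	}

	for hook, cmd := range exp.Scripts {
		fmt.Printf("%s: %s\n", hook, cmd)
	}
}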
@@ -183,7 +184,7 @@ func (a *accumulator) Resolve(path string, seenPaths []string, fns ...LookupFunc // fetch the string node to resolve field, ok := a.strings[path] if !ok { - return fmt.Errorf("could not resolve reference %s", path) + return fmt.Errorf("no value found for interpolation reference: ${%s}", path) } // return early if the string field has no variables to interpolate diff --git a/bundle/config/interpolation/interpolation_test.go b/bundle/config/interpolation/interpolation_test.go index 83254c9b0..cccb6dc71 100644 --- a/bundle/config/interpolation/interpolation_test.go +++ b/bundle/config/interpolation/interpolation_test.go @@ -247,5 +247,5 @@ func TestInterpolationInvalidVariableReference(t *testing.T) { } err := expand(&config) - assert.ErrorContains(t, err, "could not resolve reference vars.foo") + assert.ErrorContains(t, err, "no value found for interpolation reference: ${vars.foo}") } diff --git a/bundle/config/interpolation/lookup.go b/bundle/config/interpolation/lookup.go index 932d739e2..3dc5047a7 100644 --- a/bundle/config/interpolation/lookup.go +++ b/bundle/config/interpolation/lookup.go @@ -3,9 +3,8 @@ package interpolation import ( "errors" "fmt" + "slices" "strings" - - "golang.org/x/exp/slices" ) // LookupFunction returns the value to rewrite a path expression to. diff --git a/bundle/config/mutator/default_environment.go b/bundle/config/mutator/default_environment.go deleted file mode 100644 index 1598a647d..000000000 --- a/bundle/config/mutator/default_environment.go +++ /dev/null @@ -1,37 +0,0 @@ -package mutator - -import ( - "context" - "fmt" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" -) - -type defineDefaultEnvironment struct { - name string -} - -// DefineDefaultEnvironment adds an environment named "default" -// to the configuration if none have been defined. -func DefineDefaultEnvironment() bundle.Mutator { - return &defineDefaultEnvironment{ - name: "default", - } -} - -func (m *defineDefaultEnvironment) Name() string { - return fmt.Sprintf("DefineDefaultEnvironment(%s)", m.name) -} - -func (m *defineDefaultEnvironment) Apply(_ context.Context, b *bundle.Bundle) error { - // Nothing to do if the configuration has at least 1 environment. - if len(b.Config.Environments) > 0 { - return nil - } - - // Define default environment. - b.Config.Environments = make(map[string]*config.Environment) - b.Config.Environments[m.name] = &config.Environment{} - return nil -} diff --git a/bundle/config/mutator/default_include.go b/bundle/config/mutator/default_include.go deleted file mode 100644 index baf052968..000000000 --- a/bundle/config/mutator/default_include.go +++ /dev/null @@ -1,36 +0,0 @@ -package mutator - -import ( - "context" - - "github.com/databricks/cli/bundle" - "golang.org/x/exp/slices" -) - -type defineDefaultInclude struct { - include []string -} - -// DefineDefaultInclude sets the list of includes to a default if it hasn't been set. -func DefineDefaultInclude() bundle.Mutator { - return &defineDefaultInclude{ - // When we support globstar we can collapse below into a single line. - include: []string{ - // Load YAML files in the same directory. - "*.yml", - // Load YAML files in subdirectories. 
- "*/*.yml", - }, - } -} - -func (m *defineDefaultInclude) Name() string { - return "DefineDefaultInclude" -} - -func (m *defineDefaultInclude) Apply(_ context.Context, b *bundle.Bundle) error { - if len(b.Config.Include) == 0 { - b.Config.Include = slices.Clone(m.include) - } - return nil -} diff --git a/bundle/config/mutator/default_include_test.go b/bundle/config/mutator/default_include_test.go deleted file mode 100644 index ac1c1d4ef..000000000 --- a/bundle/config/mutator/default_include_test.go +++ /dev/null @@ -1,18 +0,0 @@ -package mutator_test - -import ( - "context" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/mutator" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDefaultInclude(t *testing.T) { - bundle := &bundle.Bundle{} - err := mutator.DefineDefaultInclude().Apply(context.Background(), bundle) - require.NoError(t, err) - assert.Equal(t, []string{"*.yml", "*/*.yml"}, bundle.Config.Include) -} diff --git a/bundle/config/mutator/default_target.go b/bundle/config/mutator/default_target.go new file mode 100644 index 000000000..d5318a3e2 --- /dev/null +++ b/bundle/config/mutator/default_target.go @@ -0,0 +1,37 @@ +package mutator + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" +) + +type defineDefaultTarget struct { + name string +} + +// DefineDefaultTarget adds a target named "default" +// to the configuration if none have been defined. +func DefineDefaultTarget() bundle.Mutator { + return &defineDefaultTarget{ + name: "default", + } +} + +func (m *defineDefaultTarget) Name() string { + return fmt.Sprintf("DefineDefaultTarget(%s)", m.name) +} + +func (m *defineDefaultTarget) Apply(_ context.Context, b *bundle.Bundle) error { + // Nothing to do if the configuration has at least 1 target. + if len(b.Config.Targets) > 0 { + return nil + } + + // Define default target. 
+ b.Config.Targets = make(map[string]*config.Target) + b.Config.Targets[m.name] = &config.Target{} + return nil +} diff --git a/bundle/config/mutator/default_environment_test.go b/bundle/config/mutator/default_target_test.go similarity index 51% rename from bundle/config/mutator/default_environment_test.go rename to bundle/config/mutator/default_target_test.go index f196e5bae..49fbe6de2 100644 --- a/bundle/config/mutator/default_environment_test.go +++ b/bundle/config/mutator/default_target_test.go @@ -11,25 +11,25 @@ import ( "github.com/stretchr/testify/require" ) -func TestDefaultEnvironment(t *testing.T) { +func TestDefaultTarget(t *testing.T) { bundle := &bundle.Bundle{} - err := mutator.DefineDefaultEnvironment().Apply(context.Background(), bundle) + err := mutator.DefineDefaultTarget().Apply(context.Background(), bundle) require.NoError(t, err) - env, ok := bundle.Config.Environments["default"] + env, ok := bundle.Config.Targets["default"] assert.True(t, ok) - assert.Equal(t, &config.Environment{}, env) + assert.Equal(t, &config.Target{}, env) } -func TestDefaultEnvironmentAlreadySpecified(t *testing.T) { +func TestDefaultTargetAlreadySpecified(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ - Environments: map[string]*config.Environment{ + Targets: map[string]*config.Target{ "development": {}, }, }, } - err := mutator.DefineDefaultEnvironment().Apply(context.Background(), bundle) + err := mutator.DefineDefaultTarget().Apply(context.Background(), bundle) require.NoError(t, err) - _, ok := bundle.Config.Environments["default"] + _, ok := bundle.Config.Targets["default"] assert.False(t, ok) } diff --git a/bundle/config/mutator/default_workspace_root.go b/bundle/config/mutator/default_workspace_root.go index bf51eda9e..260a59584 100644 --- a/bundle/config/mutator/default_workspace_root.go +++ b/bundle/config/mutator/default_workspace_root.go @@ -27,14 +27,14 @@ func (m *defineDefaultWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle return fmt.Errorf("unable to define default workspace root: bundle name not defined") } - if b.Config.Bundle.Environment == "" { - return fmt.Errorf("unable to define default workspace root: bundle environment not selected") + if b.Config.Bundle.Target == "" { + return fmt.Errorf("unable to define default workspace root: bundle target not selected") } b.Config.Workspace.RootPath = fmt.Sprintf( "~/.bundle/%s/%s", b.Config.Bundle.Name, - b.Config.Bundle.Environment, + b.Config.Bundle.Target, ) return nil } diff --git a/bundle/config/mutator/default_workspace_root_test.go b/bundle/config/mutator/default_workspace_root_test.go index 4a78e6e5c..1822dca0f 100644 --- a/bundle/config/mutator/default_workspace_root_test.go +++ b/bundle/config/mutator/default_workspace_root_test.go @@ -15,8 +15,8 @@ func TestDefaultWorkspaceRoot(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ - Name: "name", - Environment: "environment", + Name: "name", + Target: "environment", }, }, } diff --git a/bundle/config/mutator/expand_workspace_root_test.go b/bundle/config/mutator/expand_workspace_root_test.go index e872dc835..0ec11a07d 100644 --- a/bundle/config/mutator/expand_workspace_root_test.go +++ b/bundle/config/mutator/expand_workspace_root_test.go @@ -16,8 +16,10 @@ func TestExpandWorkspaceRoot(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ - CurrentUser: &iam.User{ - UserName: "jane@doe.com", + CurrentUser: &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, }, 
RootPath: "~/foo", }, @@ -32,8 +34,10 @@ func TestExpandWorkspaceRootDoesNothing(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ - CurrentUser: &iam.User{ - UserName: "jane@doe.com", + CurrentUser: &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, }, RootPath: "/Users/charly@doe.com/foo", }, @@ -48,8 +52,10 @@ func TestExpandWorkspaceRootWithoutRoot(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ - CurrentUser: &iam.User{ - UserName: "jane@doe.com", + CurrentUser: &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, }, }, }, diff --git a/bundle/config/mutator/load_git_details.go b/bundle/config/mutator/load_git_details.go index 121924c62..ab47677dd 100644 --- a/bundle/config/mutator/load_git_details.go +++ b/bundle/config/mutator/load_git_details.go @@ -24,15 +24,20 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { if err != nil { return err } - // load branch name if undefined - if b.Config.Bundle.Git.Branch == "" { - branch, err := repo.CurrentBranch() - if err != nil { - log.Warnf(ctx, "failed to load current branch: %s", err) - } else { + + // Read branch name of current checkout + branch, err := repo.CurrentBranch() + if err == nil { + b.Config.Bundle.Git.ActualBranch = branch + if b.Config.Bundle.Git.Branch == "" { + // Only load branch if there's no user defined value + b.Config.Bundle.Git.Inferred = true b.Config.Bundle.Git.Branch = branch } + } else { + log.Warnf(ctx, "failed to load current branch: %s", err) } + // load commit hash if undefined if b.Config.Bundle.Git.Commit == "" { commit, err := repo.LatestCommit() diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index 9a4486042..aa762e8e6 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -2,17 +2,19 @@ package mutator import ( "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/scripts" ) func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ - DefineDefaultInclude(), + scripts.Execute(config.ScriptPreInit), ProcessRootIncludes(), - DefineDefaultEnvironment(), + DefineDefaultTarget(), LoadGitDetails(), } } -func DefaultMutatorsForEnvironment(env string) []bundle.Mutator { - return append(DefaultMutators(), SelectEnvironment(env)) +func DefaultMutatorsForTarget(env string) []bundle.Mutator { + return append(DefaultMutators(), SelectTarget(env)) } diff --git a/bundle/config/mutator/override_compute.go b/bundle/config/mutator/override_compute.go index ba3fd9940..21d950135 100644 --- a/bundle/config/mutator/override_compute.go +++ b/bundle/config/mutator/override_compute.go @@ -3,11 +3,11 @@ package mutator import ( "context" "fmt" - "os" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/env" ) type overrideCompute struct{} @@ -23,10 +23,10 @@ func (m *overrideCompute) Name() string { func overrideJobCompute(j *resources.Job, compute string) { for i := range j.Tasks { task := &j.Tasks[i] - if task.NewCluster != nil { + if task.NewCluster != nil || task.ExistingClusterId != "" || task.ComputeKey != "" || task.JobClusterKey != "" { task.NewCluster = nil - task.ExistingClusterId = compute - } else if task.ExistingClusterId != "" { + task.JobClusterKey = "" + task.ComputeKey = "" task.ExistingClusterId = compute } } @@ -35,12 +35,12 @@ 
func overrideJobCompute(j *resources.Job, compute string) { func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) error { if b.Config.Bundle.Mode != config.Development { if b.Config.Bundle.ComputeID != "" { - return fmt.Errorf("cannot override compute for an environment that does not use 'mode: development'") + return fmt.Errorf("cannot override compute for an target that does not use 'mode: development'") } return nil } - if os.Getenv("DATABRICKS_CLUSTER_ID") != "" { - b.Config.Bundle.ComputeID = os.Getenv("DATABRICKS_CLUSTER_ID") + if v := env.Get(ctx, "DATABRICKS_CLUSTER_ID"); v != "" { + b.Config.Bundle.ComputeID = v } if b.Config.Bundle.ComputeID == "" { diff --git a/bundle/config/mutator/override_compute_test.go b/bundle/config/mutator/override_compute_test.go index 9eb99edb9..cb37eeb5f 100644 --- a/bundle/config/mutator/override_compute_test.go +++ b/bundle/config/mutator/override_compute_test.go @@ -2,7 +2,6 @@ package mutator_test import ( "context" - "os" "testing" "github.com/databricks/cli/bundle" @@ -16,7 +15,7 @@ import ( ) func TestOverrideDevelopment(t *testing.T) { - os.Setenv("DATABRICKS_CLUSTER_ID", "") + t.Setenv("DATABRICKS_CLUSTER_ID", "") bundle := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ @@ -34,6 +33,12 @@ func TestOverrideDevelopment(t *testing.T) { { ExistingClusterId: "cluster2", }, + { + ComputeKey: "compute_key", + }, + { + JobClusterKey: "cluster_key", + }, }, }}, }, @@ -47,10 +52,16 @@ func TestOverrideDevelopment(t *testing.T) { assert.Nil(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) + assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[2].ExistingClusterId) + assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[3].ExistingClusterId) + + assert.Nil(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) + assert.Empty(t, bundle.Config.Resources.Jobs["job1"].Tasks[2].ComputeKey) + assert.Empty(t, bundle.Config.Resources.Jobs["job1"].Tasks[3].JobClusterKey) } func TestOverrideDevelopmentEnv(t *testing.T) { - os.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") + t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") bundle := &bundle.Bundle{ Config: config.Root{ Resources: config.Resources{ @@ -77,6 +88,31 @@ func TestOverrideDevelopmentEnv(t *testing.T) { assert.Equal(t, "cluster2", bundle.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) } +func TestOverridePipelineTask(t *testing.T) { + t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") + bundle := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": {JobSettings: &jobs.JobSettings{ + Name: "job1", + Tasks: []jobs.Task{ + { + PipelineTask: &jobs.PipelineTask{}, + }, + }, + }}, + }, + }, + }, + } + + m := mutator.OverrideCompute() + err := m.Apply(context.Background(), bundle) + require.NoError(t, err) + assert.Empty(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) +} + func TestOverrideProduction(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ @@ -107,7 +143,7 @@ func TestOverrideProduction(t *testing.T) { } func TestOverrideProductionEnv(t *testing.T) { - os.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") + t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") bundle := &bundle.Bundle{ Config: config.Root{ Resources: 
config.Resources{ diff --git a/bundle/config/mutator/populate_current_user.go b/bundle/config/mutator/populate_current_user.go index 34c6ff6e3..bba0457c4 100644 --- a/bundle/config/mutator/populate_current_user.go +++ b/bundle/config/mutator/populate_current_user.go @@ -2,8 +2,11 @@ package mutator import ( "context" + "strings" + "unicode" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" ) type populateCurrentUser struct{} @@ -18,12 +21,32 @@ func (m *populateCurrentUser) Name() string { } func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error { + if b.Config.Workspace.CurrentUser != nil { + return nil + } + w := b.WorkspaceClient() me, err := w.CurrentUser.Me(ctx) if err != nil { return err } - b.Config.Workspace.CurrentUser = me + b.Config.Workspace.CurrentUser = &config.User{ + ShortName: getShortUserName(me.UserName), + User: me, + } return nil } + +// Get a short-form username, based on the user's primary email address. +// We leave the full range of unicode letters in tact, but remove all "special" characters, +// including dots, which are not supported in e.g. experiment names. +func getShortUserName(emailAddress string) string { + r := []rune(strings.Split(emailAddress, "@")[0]) + for i := 0; i < len(r); i++ { + if !unicode.IsLetter(r[i]) { + r[i] = '_' + } + } + return string(r) +} diff --git a/bundle/config/mutator/populate_current_user_test.go b/bundle/config/mutator/populate_current_user_test.go index 4c28d1cd3..79ec52b8f 100644 --- a/bundle/config/mutator/populate_current_user_test.go +++ b/bundle/config/mutator/populate_current_user_test.go @@ -1,3 +1,40 @@ package mutator -// We need to implement workspace client mocking to implement this test. +import "testing" + +func TestPopulateCurrentUser(t *testing.T) { + // We need to implement workspace client mocking to implement this test. +} + +func TestGetShortUserName(t *testing.T) { + tests := []struct { + name string + email string + expected string + }{ + { + name: "test alphanumeric characters", + email: "test.user@example.com", + expected: "test_user", + }, + { + name: "test unicode characters", + email: "tést.üser@example.com", + expected: "tést_üser", + }, + { + name: "test special characters", + email: "test$.user@example.com", + expected: "test__user", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := getShortUserName(tt.email) + if result != tt.expected { + t.Errorf("getShortUserName(%q) = %q; expected %q", tt.email, result, tt.expected) + } + }) + } +} diff --git a/bundle/config/mutator/process_environment_mode.go b/bundle/config/mutator/process_environment_mode.go deleted file mode 100644 index 3e1b7e819..000000000 --- a/bundle/config/mutator/process_environment_mode.go +++ /dev/null @@ -1,89 +0,0 @@ -package mutator - -import ( - "context" - "fmt" - "path" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/databricks/databricks-sdk-go/service/ml" -) - -type processEnvironmentMode struct{} - -const developmentConcurrentRuns = 4 - -func ProcessEnvironmentMode() bundle.Mutator { - return &processEnvironmentMode{} -} - -func (m *processEnvironmentMode) Name() string { - return "ProcessEnvironmentMode" -} - -// Mark all resources as being for 'development' purposes, i.e. -// changing their their name, adding tags, and (in the future) -// marking them as 'hidden' in the UI. 
-func processDevelopmentMode(b *bundle.Bundle) error { - r := b.Config.Resources - - for i := range r.Jobs { - r.Jobs[i].Name = "[dev] " + r.Jobs[i].Name - if r.Jobs[i].Tags == nil { - r.Jobs[i].Tags = make(map[string]string) - } - r.Jobs[i].Tags["dev"] = "" - if r.Jobs[i].MaxConcurrentRuns == 0 { - r.Jobs[i].MaxConcurrentRuns = developmentConcurrentRuns - } - if r.Jobs[i].Schedule != nil { - r.Jobs[i].Schedule.PauseStatus = jobs.PauseStatusPaused - } - if r.Jobs[i].Continuous != nil { - r.Jobs[i].Continuous.PauseStatus = jobs.PauseStatusPaused - } - if r.Jobs[i].Trigger != nil { - r.Jobs[i].Trigger.PauseStatus = jobs.PauseStatusPaused - } - } - - for i := range r.Pipelines { - r.Pipelines[i].Name = "[dev] " + r.Pipelines[i].Name - r.Pipelines[i].Development = true - // (pipelines don't yet support tags) - } - - for i := range r.Models { - r.Models[i].Name = "[dev] " + r.Models[i].Name - r.Models[i].Tags = append(r.Models[i].Tags, ml.ModelTag{Key: "dev", Value: ""}) - } - - for i := range r.Experiments { - filepath := r.Experiments[i].Name - dir := path.Dir(filepath) - base := path.Base(filepath) - if dir == "." { - r.Experiments[i].Name = "[dev] " + base - } else { - r.Experiments[i].Name = dir + "/[dev] " + base - } - r.Experiments[i].Tags = append(r.Experiments[i].Tags, ml.ExperimentTag{Key: "dev", Value: ""}) - } - - return nil -} - -func (m *processEnvironmentMode) Apply(ctx context.Context, b *bundle.Bundle) error { - switch b.Config.Bundle.Mode { - case config.Development: - return processDevelopmentMode(b) - case "": - // No action - default: - return fmt.Errorf("unsupported value specified for 'mode': %s", b.Config.Bundle.Mode) - } - - return nil -} diff --git a/bundle/config/mutator/process_environment_mode_test.go b/bundle/config/mutator/process_environment_mode_test.go deleted file mode 100644 index 5342de212..000000000 --- a/bundle/config/mutator/process_environment_mode_test.go +++ /dev/null @@ -1,77 +0,0 @@ -package mutator_test - -import ( - "context" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/mutator" - "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/databricks/databricks-sdk-go/service/ml" - "github.com/databricks/databricks-sdk-go/service/pipelines" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestProcessEnvironmentModeApplyDebug(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Bundle: config.Bundle{ - Mode: config.Development, - }, - Resources: config.Resources{ - Jobs: map[string]*resources.Job{ - "job1": {JobSettings: &jobs.JobSettings{Name: "job1"}}, - }, - Pipelines: map[string]*resources.Pipeline{ - "pipeline1": {PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"}}, - }, - Experiments: map[string]*resources.MlflowExperiment{ - "experiment1": {Experiment: &ml.Experiment{Name: "/Users/lennart.kats@databricks.com/experiment1"}}, - "experiment2": {Experiment: &ml.Experiment{Name: "experiment2"}}, - }, - Models: map[string]*resources.MlflowModel{ - "model1": {Model: &ml.Model{Name: "model1"}}, - }, - }, - }, - } - - m := mutator.ProcessEnvironmentMode() - err := m.Apply(context.Background(), bundle) - require.NoError(t, err) - assert.Equal(t, "[dev] job1", bundle.Config.Resources.Jobs["job1"].Name) - assert.Equal(t, "[dev] pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) - assert.Equal(t, 
"/Users/lennart.kats@databricks.com/[dev] experiment1", bundle.Config.Resources.Experiments["experiment1"].Name) - assert.Equal(t, "[dev] experiment2", bundle.Config.Resources.Experiments["experiment2"].Name) - assert.Equal(t, "[dev] model1", bundle.Config.Resources.Models["model1"].Name) - assert.Equal(t, "dev", bundle.Config.Resources.Experiments["experiment1"].Experiment.Tags[0].Key) - assert.True(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) -} - -func TestProcessEnvironmentModeApplyDefault(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Bundle: config.Bundle{ - Mode: "", - }, - Resources: config.Resources{ - Jobs: map[string]*resources.Job{ - "job1": {JobSettings: &jobs.JobSettings{Name: "job1"}}, - }, - Pipelines: map[string]*resources.Pipeline{ - "pipeline1": {PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"}}, - }, - }, - }, - } - - m := mutator.ProcessEnvironmentMode() - err := m.Apply(context.Background(), bundle) - require.NoError(t, err) - assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) - assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) - assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) -} diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index 454e3a987..5a5ab1b19 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -3,14 +3,25 @@ package mutator import ( "context" "fmt" + "os" "path/filepath" + "slices" "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "golang.org/x/exp/slices" + "github.com/databricks/cli/bundle/env" ) +// Get extra include paths from environment variable +func getExtraIncludePaths(ctx context.Context) []string { + value, exists := env.Includes(ctx) + if !exists { + return nil + } + return strings.Split(value, string(os.PathListSeparator)) +} + type processRootIncludes struct{} // ProcessRootIncludes expands the patterns in the configuration's include list @@ -27,14 +38,28 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error var out []bundle.Mutator // Map with files we've already seen to avoid loading them twice. - var seen = map[string]bool{ - config.FileName: true, + var seen = map[string]bool{} + + for _, file := range config.FileNames { + seen[file] = true } // Maintain list of files in order of files being loaded. // This is stored in the bundle configuration for observability. var files []string + // Converts extra include paths from environment variable to relative paths + for _, extraIncludePath := range getExtraIncludePaths(ctx) { + if filepath.IsAbs(extraIncludePath) { + rel, err := filepath.Rel(b.Config.Path, extraIncludePath) + if err != nil { + return fmt.Errorf("unable to include file '%s': %w", extraIncludePath, err) + } + extraIncludePath = rel + } + b.Config.Include = append(b.Config.Include, extraIncludePath) + } + // For each glob, find all files to load. // Ordering of the list of globs is maintained in the output. // For matches that appear in multiple globs, only the first is kept. 
diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index c7d00d88b..aec9b32df 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -3,13 +3,16 @@ package mutator_test import ( "context" "os" + "path" "path/filepath" "runtime" + "strings" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/env" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -61,7 +64,7 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) { }, } - touch(t, bundle.Config.Path, "bundle.yml") + touch(t, bundle.Config.Path, "databricks.yml") touch(t, bundle.Config.Path, "a.yml") touch(t, bundle.Config.Path, "b.yml") @@ -122,3 +125,43 @@ func TestProcessRootIncludesNotExists(t *testing.T) { require.Error(t, err) assert.Contains(t, err.Error(), "notexist.yml defined in 'include' section does not match any files") } + +func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { + rootPath := t.TempDir() + testYamlName := "extra_include_path.yml" + touch(t, rootPath, testYamlName) + t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) + + bundle := &bundle.Bundle{ + Config: config.Root{ + Path: rootPath, + }, + } + + err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + require.NoError(t, err) + assert.Contains(t, bundle.Config.Include, testYamlName) +} + +func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { + rootPath := t.TempDir() + testYamlName := "extra_include_path.yml" + touch(t, rootPath, testYamlName) + t.Setenv(env.IncludesVariable, strings.Join( + []string{ + path.Join(rootPath, testYamlName), + path.Join(rootPath, testYamlName), + }, + string(os.PathListSeparator), + )) + + bundle := &bundle.Bundle{ + Config: config.Root{ + Path: rootPath, + }, + } + + err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + require.NoError(t, err) + assert.Equal(t, []string{testYamlName}, bundle.Config.Include) +} diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go new file mode 100644 index 000000000..93149ad04 --- /dev/null +++ b/bundle/config/mutator/process_target_mode.go @@ -0,0 +1,178 @@ +package mutator + +import ( + "context" + "fmt" + "path" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/auth" + "github.com/databricks/cli/libs/log" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/databricks/databricks-sdk-go/service/ml" +) + +type processTargetMode struct{} + +const developmentConcurrentRuns = 4 + +func ProcessTargetMode() bundle.Mutator { + return &processTargetMode{} +} + +func (m *processTargetMode) Name() string { + return "ProcessTargetMode" +} + +// Mark all resources as being for 'development' purposes, i.e. +// changing their their name, adding tags, and (in the future) +// marking them as 'hidden' in the UI. 
+func transformDevelopmentMode(b *bundle.Bundle) error { + r := b.Config.Resources + + prefix := "[dev " + b.Config.Workspace.CurrentUser.ShortName + "] " + + for i := range r.Jobs { + r.Jobs[i].Name = prefix + r.Jobs[i].Name + if r.Jobs[i].Tags == nil { + r.Jobs[i].Tags = make(map[string]string) + } + r.Jobs[i].Tags["dev"] = b.Config.Workspace.CurrentUser.DisplayName + if r.Jobs[i].MaxConcurrentRuns == 0 { + r.Jobs[i].MaxConcurrentRuns = developmentConcurrentRuns + } + if r.Jobs[i].Schedule != nil { + r.Jobs[i].Schedule.PauseStatus = jobs.PauseStatusPaused + } + if r.Jobs[i].Continuous != nil { + r.Jobs[i].Continuous.PauseStatus = jobs.PauseStatusPaused + } + if r.Jobs[i].Trigger != nil { + r.Jobs[i].Trigger.PauseStatus = jobs.PauseStatusPaused + } + } + + for i := range r.Pipelines { + r.Pipelines[i].Name = prefix + r.Pipelines[i].Name + r.Pipelines[i].Development = true + // (pipelines don't yet support tags) + } + + for i := range r.Models { + r.Models[i].Name = prefix + r.Models[i].Name + r.Models[i].Tags = append(r.Models[i].Tags, ml.ModelTag{Key: "dev", Value: ""}) + } + + for i := range r.Experiments { + filepath := r.Experiments[i].Name + dir := path.Dir(filepath) + base := path.Base(filepath) + if dir == "." { + r.Experiments[i].Name = prefix + base + } else { + r.Experiments[i].Name = dir + "/" + prefix + base + } + r.Experiments[i].Tags = append(r.Experiments[i].Tags, ml.ExperimentTag{Key: "dev", Value: b.Config.Workspace.CurrentUser.DisplayName}) + } + + for i := range r.ModelServingEndpoints { + prefix = "dev_" + b.Config.Workspace.CurrentUser.ShortName + "_" + r.ModelServingEndpoints[i].Name = prefix + r.ModelServingEndpoints[i].Name + // (model serving doesn't yet support tags) + } + + return nil +} + +func validateDevelopmentMode(b *bundle.Bundle) error { + if path := findIncorrectPath(b, config.Development); path != "" { + return fmt.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path) + } + return nil +} + +func findIncorrectPath(b *bundle.Bundle, mode config.Mode) string { + username := b.Config.Workspace.CurrentUser.UserName + containsExpected := true + if mode == config.Production { + containsExpected = false + } + + if strings.Contains(b.Config.Workspace.RootPath, username) != containsExpected && b.Config.Workspace.RootPath != "" { + return "root_path" + } + if strings.Contains(b.Config.Workspace.StatePath, username) != containsExpected { + return "state_path" + } + if strings.Contains(b.Config.Workspace.FilesPath, username) != containsExpected { + return "files_path" + } + if strings.Contains(b.Config.Workspace.ArtifactsPath, username) != containsExpected { + return "artifacts_path" + } + return "" +} + +func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) error { + if b.Config.Bundle.Git.Inferred { + env := b.Config.Bundle.Target + log.Warnf(ctx, "target with 'mode: production' should specify an explicit 'targets.%s.git' configuration", env) + } + + r := b.Config.Resources + for i := range r.Pipelines { + if r.Pipelines[i].Development { + return fmt.Errorf("target with 'mode: production' cannot specify a pipeline with 'development: true'") + } + } + + if !isPrincipalUsed { + if path := findIncorrectPath(b, config.Production); path != "" { + message := "%s must not contain the current username when using 'mode: production'" + if path == "root_path" { + return fmt.Errorf(message+"\n tip: set workspace.root_path to a shared path such as 
/Shared/.bundle/${bundle.name}/${bundle.target}", path) + } else { + return fmt.Errorf(message, path) + } + } + + if !isRunAsSet(r) { + return fmt.Errorf("'run_as' must be set for all jobs when using 'mode: production'") + } + } + return nil +} + +// Determines whether run_as is explicitly set for all resources. +// We do this in a best-effort fashion rather than check the top-level +// 'run_as' field because the latter is not required to be set. +func isRunAsSet(r config.Resources) bool { + for i := range r.Jobs { + if r.Jobs[i].RunAs == nil { + return false + } + } + return true +} + +func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { + switch b.Config.Bundle.Mode { + case config.Development: + err := validateDevelopmentMode(b) + if err != nil { + return err + } + return transformDevelopmentMode(b) + case config.Production: + isPrincipal := auth.IsServicePrincipal(b.Config.Workspace.CurrentUser.Id) + return validateProductionMode(ctx, b, isPrincipal) + case "": + // No action + default: + return fmt.Errorf("unsupported value '%s' specified for 'mode': must be either 'development' or 'production'", b.Config.Bundle.Mode) + } + + return nil +} diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go new file mode 100644 index 000000000..4ea33c70b --- /dev/null +++ b/bundle/config/mutator/process_target_mode_test.go @@ -0,0 +1,187 @@ +package mutator + +import ( + "context" + "reflect" + "strings" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/databricks/databricks-sdk-go/service/ml" + "github.com/databricks/databricks-sdk-go/service/pipelines" + "github.com/databricks/databricks-sdk-go/service/serving" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func mockBundle(mode config.Mode) *bundle.Bundle { + return &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Mode: mode, + Git: config.Git{ + OriginURL: "http://origin", + Branch: "main", + }, + }, + Workspace: config.Workspace{ + CurrentUser: &config.User{ + ShortName: "lennart", + User: &iam.User{ + UserName: "lennart@company.com", + Id: "1", + }, + }, + StatePath: "/Users/lennart@company.com/.bundle/x/y/state", + ArtifactsPath: "/Users/lennart@company.com/.bundle/x/y/artifacts", + FilesPath: "/Users/lennart@company.com/.bundle/x/y/files", + }, + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": {JobSettings: &jobs.JobSettings{Name: "job1"}}, + }, + Pipelines: map[string]*resources.Pipeline{ + "pipeline1": {PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"}}, + }, + Experiments: map[string]*resources.MlflowExperiment{ + "experiment1": {Experiment: &ml.Experiment{Name: "/Users/lennart.kats@databricks.com/experiment1"}}, + "experiment2": {Experiment: &ml.Experiment{Name: "experiment2"}}, + }, + Models: map[string]*resources.MlflowModel{ + "model1": {Model: &ml.Model{Name: "model1"}}, + }, + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ + "servingendpoint1": {CreateServingEndpoint: &serving.CreateServingEndpoint{Name: "servingendpoint1"}}, + }, + }, + }, + } +} + +func TestProcessTargetModeDevelopment(t *testing.T) { + bundle := mockBundle(config.Development) + + m := ProcessTargetMode() + err := m.Apply(context.Background(), 
bundle) + require.NoError(t, err) + assert.Equal(t, "[dev lennart] job1", bundle.Config.Resources.Jobs["job1"].Name) + assert.Equal(t, "[dev lennart] pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) + assert.Equal(t, "/Users/lennart.kats@databricks.com/[dev lennart] experiment1", bundle.Config.Resources.Experiments["experiment1"].Name) + assert.Equal(t, "[dev lennart] experiment2", bundle.Config.Resources.Experiments["experiment2"].Name) + assert.Equal(t, "[dev lennart] model1", bundle.Config.Resources.Models["model1"].Name) + assert.Equal(t, "dev_lennart_servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) + assert.Equal(t, "dev", bundle.Config.Resources.Experiments["experiment1"].Experiment.Tags[0].Key) + assert.True(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) +} + +func TestProcessTargetModeDefault(t *testing.T) { + bundle := mockBundle("") + + m := ProcessTargetMode() + err := m.Apply(context.Background(), bundle) + require.NoError(t, err) + assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) + assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) + assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) + assert.Equal(t, "servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) +} + +func TestProcessTargetModeProduction(t *testing.T) { + bundle := mockBundle(config.Production) + + err := validateProductionMode(context.Background(), bundle, false) + require.ErrorContains(t, err, "state_path") + + bundle.Config.Workspace.StatePath = "/Shared/.bundle/x/y/state" + bundle.Config.Workspace.ArtifactsPath = "/Shared/.bundle/x/y/artifacts" + bundle.Config.Workspace.FilesPath = "/Shared/.bundle/x/y/files" + + err = validateProductionMode(context.Background(), bundle, false) + require.ErrorContains(t, err, "production") + + permissions := []resources.Permission{ + { + Level: "CAN_MANAGE", + UserName: "user@company.com", + }, + } + bundle.Config.Resources.Jobs["job1"].Permissions = permissions + bundle.Config.Resources.Jobs["job1"].RunAs = &jobs.JobRunAs{UserName: "user@company.com"} + bundle.Config.Resources.Pipelines["pipeline1"].Permissions = permissions + bundle.Config.Resources.Experiments["experiment1"].Permissions = permissions + bundle.Config.Resources.Experiments["experiment2"].Permissions = permissions + bundle.Config.Resources.Models["model1"].Permissions = permissions + bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Permissions = permissions + + err = validateProductionMode(context.Background(), bundle, false) + require.NoError(t, err) + + assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) + assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) + assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) + assert.Equal(t, "servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) +} + +func TestProcessTargetModeProductionOkForPrincipal(t *testing.T) { + bundle := mockBundle(config.Production) + + // Our target has all kinds of problems when not using service principals ... + err := validateProductionMode(context.Background(), bundle, false) + require.Error(t, err) + + // ... 
but we're much less strict when a principal is used + err = validateProductionMode(context.Background(), bundle, true) + require.NoError(t, err) +} + +// Make sure that we have test coverage for all resource types +func TestAllResourcesMocked(t *testing.T) { + bundle := mockBundle(config.Development) + resources := reflect.ValueOf(bundle.Config.Resources) + + for i := 0; i < resources.NumField(); i++ { + field := resources.Field(i) + if field.Kind() == reflect.Map { + assert.True( + t, + !field.IsNil() && field.Len() > 0, + "process_target_mode should support '%s' (please add it to process_target_mode.go and extend the test suite)", + resources.Type().Field(i).Name, + ) + } + } +} + +// Make sure that we at least rename all resources +func TestAllResourcesRenamed(t *testing.T) { + bundle := mockBundle(config.Development) + resources := reflect.ValueOf(bundle.Config.Resources) + + m := ProcessTargetMode() + err := m.Apply(context.Background(), bundle) + require.NoError(t, err) + + for i := 0; i < resources.NumField(); i++ { + field := resources.Field(i) + + if field.Kind() == reflect.Map { + for _, key := range field.MapKeys() { + resource := field.MapIndex(key) + nameField := resource.Elem().FieldByName("Name") + if nameField.IsValid() && nameField.Kind() == reflect.String { + assert.True( + t, + strings.Contains(nameField.String(), "dev"), + "process_target_mode should rename '%s' in '%s'", + key, + resources.Type().Field(i).Name, + ) + } + } + } + } +} diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go new file mode 100644 index 000000000..7d1a49175 --- /dev/null +++ b/bundle/config/mutator/run_as.go @@ -0,0 +1,65 @@ +package mutator + +import ( + "context" + "slices" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +type setRunAs struct { +} + +// SetRunAs mutator is used to go over defined resources such as Jobs and DLT Pipelines +// And set correct execution identity ("run_as" for a job or "is_owner" permission for DLT) +// if top-level "run-as" section is defined in the configuration. +func SetRunAs() bundle.Mutator { + return &setRunAs{} +} + +func (m *setRunAs) Name() string { + return "SetRunAs" +} + +func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) error { + runAs := b.Config.RunAs + if runAs == nil { + return nil + } + + for i := range b.Config.Resources.Jobs { + job := b.Config.Resources.Jobs[i] + if job.RunAs != nil { + continue + } + job.RunAs = &jobs.JobRunAs{ + ServicePrincipalName: runAs.ServicePrincipalName, + UserName: runAs.UserName, + } + } + + me := b.Config.Workspace.CurrentUser.UserName + // If user deploying the bundle and the one defined in run_as are the same + // Do not add IS_OWNER permission. Current user is implied to be an owner in this case. 
+ // Otherwise, it will fail due to this bug https://github.com/databricks/terraform-provider-databricks/issues/2407 + if runAs.UserName == me || runAs.ServicePrincipalName == me { + return nil + } + + for i := range b.Config.Resources.Pipelines { + pipeline := b.Config.Resources.Pipelines[i] + pipeline.Permissions = slices.DeleteFunc(pipeline.Permissions, func(p resources.Permission) bool { + return (runAs.ServicePrincipalName != "" && p.ServicePrincipalName == runAs.ServicePrincipalName) || + (runAs.UserName != "" && p.UserName == runAs.UserName) + }) + pipeline.Permissions = append(pipeline.Permissions, resources.Permission{ + Level: "IS_OWNER", + ServicePrincipalName: runAs.ServicePrincipalName, + UserName: runAs.UserName, + }) + } + + return nil +} diff --git a/bundle/config/mutator/select_default_environment.go b/bundle/config/mutator/select_default_environment.go deleted file mode 100644 index 0ed1d2db9..000000000 --- a/bundle/config/mutator/select_default_environment.go +++ /dev/null @@ -1,54 +0,0 @@ -package mutator - -import ( - "context" - "fmt" - "strings" - - "github.com/databricks/cli/bundle" - "golang.org/x/exp/maps" -) - -type selectDefaultEnvironment struct{} - -// SelectDefaultEnvironment merges the default environment into the root configuration. -func SelectDefaultEnvironment() bundle.Mutator { - return &selectDefaultEnvironment{} -} - -func (m *selectDefaultEnvironment) Name() string { - return "SelectDefaultEnvironment" -} - -func (m *selectDefaultEnvironment) Apply(ctx context.Context, b *bundle.Bundle) error { - if len(b.Config.Environments) == 0 { - return fmt.Errorf("no environments defined") - } - - // One environment means there's only one default. - names := maps.Keys(b.Config.Environments) - if len(names) == 1 { - return SelectEnvironment(names[0]).Apply(ctx, b) - } - - // Multiple environments means we look for the `default` flag. - var defaults []string - for name, env := range b.Config.Environments { - if env != nil && env.Default { - defaults = append(defaults, name) - } - } - - // It is invalid to have multiple environments with the `default` flag set. - if len(defaults) > 1 { - return fmt.Errorf("multiple environments are marked as default (%s)", strings.Join(defaults, ", ")) - } - - // If no environment has the `default` flag set, ask the user to specify one. - if len(defaults) == 0 { - return fmt.Errorf("please specify environment") - } - - // One default remaining. 
- return SelectEnvironment(defaults[0]).Apply(ctx, b) -} diff --git a/bundle/config/mutator/select_default_environment_test.go b/bundle/config/mutator/select_default_environment_test.go deleted file mode 100644 index cc8f9c01d..000000000 --- a/bundle/config/mutator/select_default_environment_test.go +++ /dev/null @@ -1,90 +0,0 @@ -package mutator_test - -import ( - "context" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/mutator" - "github.com/stretchr/testify/assert" -) - -func TestSelectDefaultEnvironmentNoEnvironments(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{}, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.ErrorContains(t, err, "no environments defined") -} - -func TestSelectDefaultEnvironmentSingleEnvironments(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": {}, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.NoError(t, err) - assert.Equal(t, "foo", bundle.Config.Bundle.Environment) -} - -func TestSelectDefaultEnvironmentNoDefaults(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": {}, - "bar": {}, - "qux": {}, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.ErrorContains(t, err, "please specify environment") -} - -func TestSelectDefaultEnvironmentNoDefaultsWithNil(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": nil, - "bar": nil, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.ErrorContains(t, err, "please specify environment") -} - -func TestSelectDefaultEnvironmentMultipleDefaults(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": {Default: true}, - "bar": {Default: true}, - "qux": {Default: true}, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.ErrorContains(t, err, "multiple environments are marked as default") -} - -func TestSelectDefaultEnvironmentSingleDefault(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": {}, - "bar": {Default: true}, - "qux": {}, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.NoError(t, err) - assert.Equal(t, "bar", bundle.Config.Bundle.Environment) -} diff --git a/bundle/config/mutator/select_default_target.go b/bundle/config/mutator/select_default_target.go new file mode 100644 index 000000000..8abcfe4ff --- /dev/null +++ b/bundle/config/mutator/select_default_target.go @@ -0,0 +1,54 @@ +package mutator + +import ( + "context" + "fmt" + "strings" + + "github.com/databricks/cli/bundle" + "golang.org/x/exp/maps" +) + +type selectDefaultTarget struct{} + +// SelectDefaultTarget merges the default target into the root configuration. 
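+//
+// A minimal sketch of the selection rules implemented below, given a
+// hypothetical configuration:
+//
+//	targets:
+//	  dev:
+//	    default: true
+//	  prod: {}
+//
+// A single defined target is always selected. With multiple targets, exactly
+// one of them must set `default: true` (here "dev"); zero defaults or more
+// than one default results in an error.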
+func SelectDefaultTarget() bundle.Mutator { + return &selectDefaultTarget{} +} + +func (m *selectDefaultTarget) Name() string { + return "SelectDefaultTarget" +} + +func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error { + if len(b.Config.Targets) == 0 { + return fmt.Errorf("no targets defined") + } + + // One target means there's only one default. + names := maps.Keys(b.Config.Targets) + if len(names) == 1 { + return SelectTarget(names[0]).Apply(ctx, b) + } + + // Multiple targets means we look for the `default` flag. + var defaults []string + for name, env := range b.Config.Targets { + if env != nil && env.Default { + defaults = append(defaults, name) + } + } + + // It is invalid to have multiple targets with the `default` flag set. + if len(defaults) > 1 { + return fmt.Errorf("multiple targets are marked as default (%s)", strings.Join(defaults, ", ")) + } + + // If no target has the `default` flag set, ask the user to specify one. + if len(defaults) == 0 { + return fmt.Errorf("please specify target") + } + + // One default remaining. + return SelectTarget(defaults[0]).Apply(ctx, b) +} diff --git a/bundle/config/mutator/select_default_target_test.go b/bundle/config/mutator/select_default_target_test.go new file mode 100644 index 000000000..5d7b93b28 --- /dev/null +++ b/bundle/config/mutator/select_default_target_test.go @@ -0,0 +1,90 @@ +package mutator_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/stretchr/testify/assert" +) + +func TestSelectDefaultTargetNoTargets(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{}, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.ErrorContains(t, err, "no targets defined") +} + +func TestSelectDefaultTargetSingleTargets(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": {}, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.NoError(t, err) + assert.Equal(t, "foo", bundle.Config.Bundle.Target) +} + +func TestSelectDefaultTargetNoDefaults(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": {}, + "bar": {}, + "qux": {}, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.ErrorContains(t, err, "please specify target") +} + +func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": nil, + "bar": nil, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.ErrorContains(t, err, "please specify target") +} + +func TestSelectDefaultTargetMultipleDefaults(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": {Default: true}, + "bar": {Default: true}, + "qux": {Default: true}, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.ErrorContains(t, err, "multiple targets are marked as default") +} + +func TestSelectDefaultTargetSingleDefault(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": {}, + "bar": {Default: true}, + "qux": {}, + }, + }, + } + err := 
mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.NoError(t, err) + assert.Equal(t, "bar", bundle.Config.Bundle.Target) +} diff --git a/bundle/config/mutator/select_environment.go b/bundle/config/mutator/select_environment.go deleted file mode 100644 index 6ced66e86..000000000 --- a/bundle/config/mutator/select_environment.go +++ /dev/null @@ -1,48 +0,0 @@ -package mutator - -import ( - "context" - "fmt" - - "github.com/databricks/cli/bundle" -) - -type selectEnvironment struct { - name string -} - -// SelectEnvironment merges the specified environment into the root configuration. -func SelectEnvironment(name string) bundle.Mutator { - return &selectEnvironment{ - name: name, - } -} - -func (m *selectEnvironment) Name() string { - return fmt.Sprintf("SelectEnvironment(%s)", m.name) -} - -func (m *selectEnvironment) Apply(_ context.Context, b *bundle.Bundle) error { - if b.Config.Environments == nil { - return fmt.Errorf("no environments defined") - } - - // Get specified environment - env, ok := b.Config.Environments[m.name] - if !ok { - return fmt.Errorf("%s: no such environment", m.name) - } - - // Merge specified environment into root configuration structure. - err := b.Config.MergeEnvironment(env) - if err != nil { - return err - } - - // Store specified environment in configuration for reference. - b.Config.Bundle.Environment = m.name - - // Clear environments after loading. - b.Config.Environments = nil - return nil -} diff --git a/bundle/config/mutator/select_target.go b/bundle/config/mutator/select_target.go new file mode 100644 index 000000000..2ad431128 --- /dev/null +++ b/bundle/config/mutator/select_target.go @@ -0,0 +1,56 @@ +package mutator + +import ( + "context" + "fmt" + "strings" + + "github.com/databricks/cli/bundle" + "golang.org/x/exp/maps" +) + +type selectTarget struct { + name string +} + +// SelectTarget merges the specified target into the root configuration. +func SelectTarget(name string) bundle.Mutator { + return &selectTarget{ + name: name, + } +} + +func (m *selectTarget) Name() string { + return fmt.Sprintf("SelectTarget(%s)", m.name) +} + +func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error { + if b.Config.Targets == nil { + return fmt.Errorf("no targets defined") + } + + // Get specified target + target, ok := b.Config.Targets[m.name] + if !ok { + return fmt.Errorf("%s: no such target. Available targets: %s", m.name, strings.Join(maps.Keys(b.Config.Targets), ", ")) + } + + // Merge specified target into root configuration structure. + err := b.Config.MergeTargetOverrides(target) + if err != nil { + return err + } + + // Store specified target in configuration for reference. + b.Config.Bundle.Target = m.name + + // We do this for backward compatibility. + // TODO: remove when Environments section is not supported anymore. + b.Config.Bundle.Environment = b.Config.Bundle.Target + + // Clear targets after loading. 
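+	// (The deprecated `environments` section, which `targets` supersedes, is
+	// cleared below as well.)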
+ b.Config.Targets = nil + b.Config.Environments = nil + + return nil +} diff --git a/bundle/config/mutator/select_environment_test.go b/bundle/config/mutator/select_target_test.go similarity index 62% rename from bundle/config/mutator/select_environment_test.go rename to bundle/config/mutator/select_target_test.go index 73b3a7893..dfcd8cb08 100644 --- a/bundle/config/mutator/select_environment_test.go +++ b/bundle/config/mutator/select_target_test.go @@ -11,13 +11,13 @@ import ( "github.com/stretchr/testify/require" ) -func TestSelectEnvironment(t *testing.T) { +func TestSelectTarget(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ Host: "foo", }, - Environments: map[string]*config.Environment{ + Targets: map[string]*config.Target{ "default": { Workspace: &config.Workspace{ Host: "bar", @@ -26,19 +26,19 @@ func TestSelectEnvironment(t *testing.T) { }, }, } - err := mutator.SelectEnvironment("default").Apply(context.Background(), bundle) + err := mutator.SelectTarget("default").Apply(context.Background(), bundle) require.NoError(t, err) assert.Equal(t, "bar", bundle.Config.Workspace.Host) } -func TestSelectEnvironmentNotFound(t *testing.T) { +func TestSelectTargetNotFound(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ - Environments: map[string]*config.Environment{ + Targets: map[string]*config.Target{ "default": {}, }, }, } - err := mutator.SelectEnvironment("doesnt-exist").Apply(context.Background(), bundle) - require.Error(t, err, "no environments defined") + err := mutator.SelectTarget("doesnt-exist").Apply(context.Background(), bundle) + require.Error(t, err, "no targets defined") } diff --git a/bundle/config/mutator/set_variables.go b/bundle/config/mutator/set_variables.go index 427b6dce4..4bf8ff82a 100644 --- a/bundle/config/mutator/set_variables.go +++ b/bundle/config/mutator/set_variables.go @@ -3,10 +3,10 @@ package mutator import ( "context" "fmt" - "os" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/cli/libs/env" ) const bundleVarPrefix = "BUNDLE_VAR_" @@ -21,7 +21,7 @@ func (m *setVariables) Name() string { return "SetVariables" } -func setVariable(v *variable.Variable, name string) error { +func setVariable(ctx context.Context, v *variable.Variable, name string) error { // case: variable already has value initialized, so skip if v.HasValue() { return nil @@ -29,7 +29,7 @@ func setVariable(v *variable.Variable, name string) error { // case: read and set variable value from process environment envVarName := bundleVarPrefix + name - if val, ok := os.LookupEnv(envVarName); ok { + if val, ok := env.Lookup(ctx, envVarName); ok { err := v.Set(val) if err != nil { return fmt.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %w`, val, name, envVarName, err) @@ -54,7 +54,7 @@ func setVariable(v *variable.Variable, name string) error { func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) error { for name, variable := range b.Config.Variables { - err := setVariable(variable, name) + err := setVariable(ctx, variable, name) if err != nil { return err } diff --git a/bundle/config/mutator/set_variables_test.go b/bundle/config/mutator/set_variables_test.go index 91948aa4b..323f1e864 100644 --- a/bundle/config/mutator/set_variables_test.go +++ b/bundle/config/mutator/set_variables_test.go @@ -21,7 +21,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) { // set value for variable as an environment variable 
t.Setenv("BUNDLE_VAR_foo", "process-env") - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") require.NoError(t, err) assert.Equal(t, *variable.Value, "process-env") } @@ -33,7 +33,7 @@ func TestSetVariableUsingDefaultValue(t *testing.T) { Default: &defaultVal, } - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") require.NoError(t, err) assert.Equal(t, *variable.Value, "default") } @@ -49,7 +49,7 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) { // since a value is already assigned to the variable, it would not be overridden // by the default value - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") require.NoError(t, err) assert.Equal(t, *variable.Value, "assigned-value") } @@ -68,7 +68,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { // since a value is already assigned to the variable, it would not be overridden // by the value from environment - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") require.NoError(t, err) assert.Equal(t, *variable.Value, "assigned-value") } @@ -79,7 +79,7 @@ func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) { } // fails because we could not resolve a value for the variable - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable") } diff --git a/bundle/config/mutator/trampoline.go b/bundle/config/mutator/trampoline.go new file mode 100644 index 000000000..52d62c1ba --- /dev/null +++ b/bundle/config/mutator/trampoline.go @@ -0,0 +1,100 @@ +package mutator + +import ( + "context" + "fmt" + "os" + "path" + "path/filepath" + "text/template" + + "github.com/databricks/cli/bundle" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +type TaskWithJobKey struct { + Task *jobs.Task + JobKey string +} + +type TrampolineFunctions interface { + GetTemplateData(task *jobs.Task) (map[string]any, error) + GetTasks(b *bundle.Bundle) []TaskWithJobKey + CleanUp(task *jobs.Task) error +} +type trampoline struct { + name string + functions TrampolineFunctions + template string +} + +func NewTrampoline( + name string, + functions TrampolineFunctions, + template string, +) *trampoline { + return &trampoline{name, functions, template} +} + +func (m *trampoline) Name() string { + return fmt.Sprintf("trampoline(%s)", m.name) +} + +func (m *trampoline) Apply(ctx context.Context, b *bundle.Bundle) error { + tasks := m.functions.GetTasks(b) + for _, task := range tasks { + err := m.generateNotebookWrapper(ctx, b, task) + if err != nil { + return err + } + } + return nil +} + +func (m *trampoline) generateNotebookWrapper(ctx context.Context, b *bundle.Bundle, task TaskWithJobKey) error { + internalDir, err := b.InternalDir(ctx) + if err != nil { + return err + } + + notebookName := fmt.Sprintf("notebook_%s_%s", task.JobKey, task.Task.TaskKey) + localNotebookPath := filepath.Join(internalDir, notebookName+".py") + + err = os.MkdirAll(filepath.Dir(localNotebookPath), 0755) + if err != nil { + return err + } + + f, err := os.Create(localNotebookPath) + if err != nil { + return err + } + defer f.Close() + + data, err := m.functions.GetTemplateData(task.Task) + if err != nil { + return err + } + + t, err := 
template.New(notebookName).Parse(m.template) + if err != nil { + return err + } + + internalDirRel, err := filepath.Rel(b.Config.Path, internalDir) + if err != nil { + return err + } + + err = m.functions.CleanUp(task.Task) + if err != nil { + return err + } + remotePath := path.Join(b.Config.Workspace.FilesPath, filepath.ToSlash(internalDirRel), notebookName) + + task.Task.NotebookTask = &jobs.NotebookTask{ + NotebookPath: remotePath, + } + + return t.Execute(f, data) +} diff --git a/bundle/config/mutator/trampoline_test.go b/bundle/config/mutator/trampoline_test.go new file mode 100644 index 000000000..a3e06b303 --- /dev/null +++ b/bundle/config/mutator/trampoline_test.go @@ -0,0 +1,98 @@ +package mutator + +import ( + "context" + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/require" +) + +type functions struct{} + +func (f *functions) GetTasks(b *bundle.Bundle) []TaskWithJobKey { + tasks := make([]TaskWithJobKey, 0) + for k := range b.Config.Resources.Jobs["test"].Tasks { + tasks = append(tasks, TaskWithJobKey{ + JobKey: "test", + Task: &b.Config.Resources.Jobs["test"].Tasks[k], + }) + } + + return tasks +} + +func (f *functions) GetTemplateData(task *jobs.Task) (map[string]any, error) { + if task.PythonWheelTask == nil { + return nil, fmt.Errorf("PythonWheelTask cannot be nil") + } + + data := make(map[string]any) + data["MyName"] = "Trampoline" + return data, nil +} + +func (f *functions) CleanUp(task *jobs.Task) error { + task.PythonWheelTask = nil + return nil +} + +func TestGenerateTrampoline(t *testing.T) { + tmpDir := t.TempDir() + + tasks := []jobs.Task{ + { + TaskKey: "to_trampoline", + PythonWheelTask: &jobs.PythonWheelTask{ + PackageName: "test", + EntryPoint: "run", + }}, + } + + b := &bundle.Bundle{ + Config: config.Root{ + Path: tmpDir, + Bundle: config.Bundle{ + Target: "development", + }, + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "test": { + Paths: paths.Paths{ + ConfigFilePath: tmpDir, + }, + JobSettings: &jobs.JobSettings{ + Tasks: tasks, + }, + }, + }, + }, + }, + } + ctx := context.Background() + + funcs := functions{} + trampoline := NewTrampoline("test_trampoline", &funcs, "Hello from {{.MyName}}") + err := bundle.Apply(ctx, b, trampoline) + require.NoError(t, err) + + dir, err := b.InternalDir(ctx) + require.NoError(t, err) + filename := filepath.Join(dir, "notebook_test_to_trampoline.py") + + bytes, err := os.ReadFile(filename) + require.NoError(t, err) + + require.Equal(t, "Hello from Trampoline", string(bytes)) + + task := b.Config.Resources.Jobs["test"].Tasks[0] + require.Equal(t, task.NotebookTask.NotebookPath, ".databricks/bundle/development/.internal/notebook_test_to_trampoline") + require.Nil(t, task.PythonWheelTask) +} diff --git a/bundle/config/mutator/translate_paths.go b/bundle/config/mutator/translate_paths.go index 08f839861..acfd55258 100644 --- a/bundle/config/mutator/translate_paths.go +++ b/bundle/config/mutator/translate_paths.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "net/url" "os" "path" "path/filepath" @@ -11,8 +12,6 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/notebook" - "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/databricks/databricks-sdk-go/service/pipelines" ) type 
ErrIsNotebook struct { @@ -44,7 +43,9 @@ func (m *translatePaths) Name() string { return "TranslatePaths" } -// rewritePath converts a given relative path to a stable remote workspace path. +type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) (string, error) + +// rewritePath converts a given relative path from the loaded config to a new path based on the passed rewriting function // // It takes these arguments: // - The argument `dir` is the directory relative to which the given relative path is. @@ -57,13 +58,23 @@ func (m *translatePaths) rewritePath( dir string, b *bundle.Bundle, p *string, - fn func(literal, localPath, remotePath string) (string, error), + fn rewriteFunc, ) error { // We assume absolute paths point to a location in the workspace if path.IsAbs(filepath.ToSlash(*p)) { return nil } + url, err := url.Parse(*p) + if err != nil { + return err + } + + // If the file path has scheme, it's a full path and we don't need to transform it + if url.Scheme != "" { + return nil + } + // Local path is relative to the directory the resource was defined in. localPath := filepath.Join(dir, filepath.FromSlash(*p)) if interp, ok := m.seen[localPath]; ok { @@ -72,19 +83,19 @@ func (m *translatePaths) rewritePath( } // Remote path must be relative to the bundle root. - remotePath, err := filepath.Rel(b.Config.Path, localPath) + localRelPath, err := filepath.Rel(b.Config.Path, localPath) if err != nil { return err } - if strings.HasPrefix(remotePath, "..") { + if strings.HasPrefix(localRelPath, "..") { return fmt.Errorf("path %s is not contained in bundle root path", localPath) } // Prefix remote path with its remote root path. - remotePath = path.Join(b.Config.Workspace.FilesPath, filepath.ToSlash(remotePath)) + remotePath := path.Join(b.Config.Workspace.FilesPath, filepath.ToSlash(localRelPath)) // Convert local path into workspace path via specified function. - interp, err := fn(*p, localPath, filepath.ToSlash(remotePath)) + interp, err := fn(*p, localPath, localRelPath, filepath.ToSlash(remotePath)) if err != nil { return err } @@ -94,81 +105,69 @@ func (m *translatePaths) rewritePath( return nil } -func (m *translatePaths) translateNotebookPath(literal, localPath, remotePath string) (string, error) { - nb, _, err := notebook.Detect(localPath) +func translateNotebookPath(literal, localFullPath, localRelPath, remotePath string) (string, error) { + nb, _, err := notebook.Detect(localFullPath) if os.IsNotExist(err) { return "", fmt.Errorf("notebook %s not found", literal) } if err != nil { - return "", fmt.Errorf("unable to determine if %s is a notebook: %w", localPath, err) + return "", fmt.Errorf("unable to determine if %s is a notebook: %w", localFullPath, err) } if !nb { - return "", ErrIsNotNotebook{localPath} + return "", ErrIsNotNotebook{localFullPath} } // Upon import, notebooks are stripped of their extension. 
- return strings.TrimSuffix(remotePath, filepath.Ext(localPath)), nil + return strings.TrimSuffix(remotePath, filepath.Ext(localFullPath)), nil } -func (m *translatePaths) translateFilePath(literal, localPath, remotePath string) (string, error) { - nb, _, err := notebook.Detect(localPath) +func translateFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) { + nb, _, err := notebook.Detect(localFullPath) if os.IsNotExist(err) { return "", fmt.Errorf("file %s not found", literal) } if err != nil { - return "", fmt.Errorf("unable to determine if %s is not a notebook: %w", localPath, err) + return "", fmt.Errorf("unable to determine if %s is not a notebook: %w", localFullPath, err) } if nb { - return "", ErrIsNotebook{localPath} + return "", ErrIsNotebook{localFullPath} } return remotePath, nil } -func (m *translatePaths) translateJobTask(dir string, b *bundle.Bundle, task *jobs.Task) error { - var err error - - if task.NotebookTask != nil { - err = m.rewritePath(dir, b, &task.NotebookTask.NotebookPath, m.translateNotebookPath) - if target := (&ErrIsNotNotebook{}); errors.As(err, target) { - return fmt.Errorf(`expected a notebook for "tasks.notebook_task.notebook_path" but got a file: %w`, target) - } - if err != nil { - return err - } - } - - if task.SparkPythonTask != nil { - err = m.rewritePath(dir, b, &task.SparkPythonTask.PythonFile, m.translateFilePath) - if target := (&ErrIsNotebook{}); errors.As(err, target) { - return fmt.Errorf(`expected a file for "tasks.spark_python_task.python_file" but got a notebook: %w`, target) - } - if err != nil { - return err - } - } - - return nil +func translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) { + return localRelPath, nil } -func (m *translatePaths) translatePipelineLibrary(dir string, b *bundle.Bundle, library *pipelines.PipelineLibrary) error { - var err error +type transformer struct { + // A directory path relative to which `path` will be transformed + dir string + // A path to transform + path *string + // Name of the config property where the path string is coming from + configPath string + // A function that performs the actual rewriting logic. 
+ fn rewriteFunc +} - if library.Notebook != nil { - err = m.rewritePath(dir, b, &library.Notebook.Path, m.translateNotebookPath) - if target := (&ErrIsNotNotebook{}); errors.As(err, target) { - return fmt.Errorf(`expected a notebook for "libraries.notebook.path" but got a file: %w`, target) - } - if err != nil { - return err - } - } +type transformFunc func(resource any, dir string) *transformer - if library.File != nil { - err = m.rewritePath(dir, b, &library.File.Path, m.translateFilePath) - if target := (&ErrIsNotebook{}); errors.As(err, target) { - return fmt.Errorf(`expected a file for "libraries.file.path" but got a notebook: %w`, target) +// Apply all matches transformers for the given resource +func (m *translatePaths) applyTransformers(funcs []transformFunc, b *bundle.Bundle, resource any, dir string) error { + for _, transformFn := range funcs { + transformer := transformFn(resource, dir) + if transformer == nil { + continue } + + err := m.rewritePath(transformer.dir, b, transformer.path, transformer.fn) if err != nil { + if target := (&ErrIsNotebook{}); errors.As(err, target) { + return fmt.Errorf(`expected a file for "%s" but got a notebook: %w`, transformer.configPath, target) + } + if target := (&ErrIsNotNotebook{}); errors.As(err, target) { + return fmt.Errorf(`expected a notebook for "%s" but got a file: %w`, transformer.configPath, target) + } return err } } @@ -179,36 +178,14 @@ func (m *translatePaths) translatePipelineLibrary(dir string, b *bundle.Bundle, func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) error { m.seen = make(map[string]string) - for key, job := range b.Config.Resources.Jobs { - dir, err := job.ConfigFileDirectory() + for _, fn := range []func(*translatePaths, *bundle.Bundle) error{ + applyJobTransformers, + applyPipelineTransformers, + applyArtifactTransformers, + } { + err := fn(m, b) if err != nil { - return fmt.Errorf("unable to determine directory for job %s: %w", key, err) - } - - // Do not translate job task paths if using git source - if job.GitSource != nil { - continue - } - - for i := 0; i < len(job.Tasks); i++ { - err := m.translateJobTask(dir, b, &job.Tasks[i]) - if err != nil { - return err - } - } - } - - for key, pipeline := range b.Config.Resources.Pipelines { - dir, err := pipeline.ConfigFileDirectory() - if err != nil { - return fmt.Errorf("unable to determine directory for pipeline %s: %w", key, err) - } - - for i := 0; i < len(pipeline.Libraries); i++ { - err := m.translatePipelineLibrary(dir, b, &pipeline.Libraries[i]) - if err != nil { - return err - } + return err } } diff --git a/bundle/config/mutator/translate_paths_artifacts.go b/bundle/config/mutator/translate_paths_artifacts.go new file mode 100644 index 000000000..91e8397cb --- /dev/null +++ b/bundle/config/mutator/translate_paths_artifacts.go @@ -0,0 +1,42 @@ +package mutator + +import ( + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" +) + +func transformArtifactPath(resource any, dir string) *transformer { + artifact, ok := resource.(*config.Artifact) + if !ok { + return nil + } + + return &transformer{ + dir, + &artifact.Path, + "artifacts.path", + translateNoOp, + } +} + +func applyArtifactTransformers(m *translatePaths, b *bundle.Bundle) error { + artifactTransformers := []transformFunc{ + transformArtifactPath, + } + + for key, artifact := range b.Config.Artifacts { + dir, err := artifact.ConfigFileDirectory() + if err != nil { + return fmt.Errorf("unable to determine directory for artifact %s: %w", key, 
err) + } + + err = m.applyTransformers(artifactTransformers, b, artifact, dir) + if err != nil { + return err + } + } + + return nil +} diff --git a/bundle/config/mutator/translate_paths_jobs.go b/bundle/config/mutator/translate_paths_jobs.go new file mode 100644 index 000000000..b94df5e2e --- /dev/null +++ b/bundle/config/mutator/translate_paths_jobs.go @@ -0,0 +1,103 @@ +package mutator + +import ( + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +func transformNotebookTask(resource any, dir string) *transformer { + task, ok := resource.(*jobs.Task) + if !ok || task.NotebookTask == nil { + return nil + } + + return &transformer{ + dir, + &task.NotebookTask.NotebookPath, + "tasks.notebook_task.notebook_path", + translateNotebookPath, + } +} + +func transformSparkTask(resource any, dir string) *transformer { + task, ok := resource.(*jobs.Task) + if !ok || task.SparkPythonTask == nil { + return nil + } + + return &transformer{ + dir, + &task.SparkPythonTask.PythonFile, + "tasks.spark_python_task.python_file", + translateFilePath, + } +} + +func transformWhlLibrary(resource any, dir string) *transformer { + library, ok := resource.(*compute.Library) + if !ok || library.Whl == "" { + return nil + } + + return &transformer{ + dir, + &library.Whl, + "libraries.whl", + translateNoOp, + } +} + +func transformJarLibrary(resource any, dir string) *transformer { + library, ok := resource.(*compute.Library) + if !ok || library.Jar == "" { + return nil + } + + return &transformer{ + dir, + &library.Jar, + "libraries.jar", + translateFilePath, + } +} + +func applyJobTransformers(m *translatePaths, b *bundle.Bundle) error { + jobTransformers := []transformFunc{ + transformNotebookTask, + transformSparkTask, + transformWhlLibrary, + transformJarLibrary, + } + + for key, job := range b.Config.Resources.Jobs { + dir, err := job.ConfigFileDirectory() + if err != nil { + return fmt.Errorf("unable to determine directory for job %s: %w", key, err) + } + + // Do not translate job task paths if using git source + if job.GitSource != nil { + continue + } + + for i := 0; i < len(job.Tasks); i++ { + task := &job.Tasks[i] + err := m.applyTransformers(jobTransformers, b, task, dir) + if err != nil { + return err + } + for j := 0; j < len(task.Libraries); j++ { + library := &task.Libraries[j] + err := m.applyTransformers(jobTransformers, b, library, dir) + if err != nil { + return err + } + } + } + } + + return nil +} diff --git a/bundle/config/mutator/translate_paths_pipelines.go b/bundle/config/mutator/translate_paths_pipelines.go new file mode 100644 index 000000000..1afdb9d51 --- /dev/null +++ b/bundle/config/mutator/translate_paths_pipelines.go @@ -0,0 +1,60 @@ +package mutator + +import ( + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/databricks-sdk-go/service/pipelines" +) + +func transformLibraryNotebook(resource any, dir string) *transformer { + library, ok := resource.(*pipelines.PipelineLibrary) + if !ok || library.Notebook == nil { + return nil + } + + return &transformer{ + dir, + &library.Notebook.Path, + "libraries.notebook.path", + translateNotebookPath, + } +} + +func transformLibraryFile(resource any, dir string) *transformer { + library, ok := resource.(*pipelines.PipelineLibrary) + if !ok || library.File == nil { + return nil + } + + return &transformer{ + dir, + &library.File.Path, + "libraries.file.path", + translateFilePath, + } +} + +func 
applyPipelineTransformers(m *translatePaths, b *bundle.Bundle) error { + pipelineTransformers := []transformFunc{ + transformLibraryNotebook, + transformLibraryFile, + } + + for key, pipeline := range b.Config.Resources.Pipelines { + dir, err := pipeline.ConfigFileDirectory() + if err != nil { + return fmt.Errorf("unable to determine directory for pipeline %s: %w", key, err) + } + + for i := 0; i < len(pipeline.Libraries); i++ { + library := &pipeline.Libraries[i] + err := m.applyTransformers(pipelineTransformers, b, library, dir) + if err != nil { + return err + } + } + } + + return nil +} diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index b87f4f676..f7edee30a 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -9,7 +9,9 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/stretchr/testify/assert" @@ -43,7 +45,7 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -103,6 +105,7 @@ func TestTranslatePaths(t *testing.T) { touchNotebookFile(t, filepath.Join(dir, "my_job_notebook.py")) touchNotebookFile(t, filepath.Join(dir, "my_pipeline_notebook.py")) touchEmptyFile(t, filepath.Join(dir, "my_python_file.py")) + touchEmptyFile(t, filepath.Join(dir, "dist", "task.jar")) bundle := &bundle.Bundle{ Config: config.Root{ @@ -113,7 +116,7 @@ func TestTranslatePaths(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -122,6 +125,9 @@ func TestTranslatePaths(t *testing.T) { NotebookTask: &jobs.NotebookTask{ NotebookPath: "./my_job_notebook.py", }, + Libraries: []compute.Library{ + {Whl: "./dist/task.whl"}, + }, }, { NotebookTask: &jobs.NotebookTask{ @@ -143,13 +149,29 @@ func TestTranslatePaths(t *testing.T) { PythonFile: "./my_python_file.py", }, }, + { + SparkJarTask: &jobs.SparkJarTask{ + MainClassName: "HelloWorld", + }, + Libraries: []compute.Library{ + {Jar: "./dist/task.jar"}, + }, + }, + { + SparkJarTask: &jobs.SparkJarTask{ + MainClassName: "HelloWorldRemote", + }, + Libraries: []compute.Library{ + {Jar: "dbfs:/bundle/dist/task_remote.jar"}, + }, + }, }, }, }, }, Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ @@ -194,6 +216,11 @@ func TestTranslatePaths(t *testing.T) { "/bundle/my_job_notebook", bundle.Config.Resources.Jobs["job"].Tasks[0].NotebookTask.NotebookPath, ) + assert.Equal( + t, + filepath.Join("dist", "task.whl"), + bundle.Config.Resources.Jobs["job"].Tasks[0].Libraries[0].Whl, + ) assert.Equal( t, "/Users/jane.doe@databricks.com/doesnt_exist.py", @@ -209,6 +236,16 @@ func TestTranslatePaths(t *testing.T) { "/bundle/my_python_file.py", 
bundle.Config.Resources.Jobs["job"].Tasks[4].SparkPythonTask.PythonFile, ) + assert.Equal( + t, + "/bundle/dist/task.jar", + bundle.Config.Resources.Jobs["job"].Tasks[5].Libraries[0].Jar, + ) + assert.Equal( + t, + "dbfs:/bundle/dist/task_remote.jar", + bundle.Config.Resources.Jobs["job"].Tasks[6].Libraries[0].Jar, + ) // Assert that the path in the libraries now refer to the artifact. assert.Equal( @@ -236,6 +273,7 @@ func TestTranslatePaths(t *testing.T) { func TestTranslatePathsInSubdirectories(t *testing.T) { dir := t.TempDir() touchEmptyFile(t, filepath.Join(dir, "job", "my_python_file.py")) + touchEmptyFile(t, filepath.Join(dir, "job", "dist", "task.jar")) touchEmptyFile(t, filepath.Join(dir, "pipeline", "my_python_file.py")) bundle := &bundle.Bundle{ @@ -247,7 +285,7 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "job/resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -257,13 +295,21 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { PythonFile: "./my_python_file.py", }, }, + { + SparkJarTask: &jobs.SparkJarTask{ + MainClassName: "HelloWorld", + }, + Libraries: []compute.Library{ + {Jar: "./dist/task.jar"}, + }, + }, }, }, }, }, Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "pipeline/resource.yml"), }, @@ -290,6 +336,11 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { "/bundle/job/my_python_file.py", bundle.Config.Resources.Jobs["job"].Tasks[0].SparkPythonTask.PythonFile, ) + assert.Equal( + t, + "/bundle/job/dist/task.jar", + bundle.Config.Resources.Jobs["job"].Tasks[1].Libraries[0].Jar, + ) assert.Equal( t, @@ -310,7 +361,7 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "../resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -341,7 +392,7 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "fake.yml"), }, JobSettings: &jobs.JobSettings{ @@ -372,7 +423,7 @@ func TestJobFileDoesNotExistError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "fake.yml"), }, JobSettings: &jobs.JobSettings{ @@ -403,7 +454,7 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "fake.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ @@ -434,7 +485,7 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "fake.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ @@ -469,7 +520,7 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ 
-504,7 +555,7 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -539,7 +590,7 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ @@ -574,7 +625,7 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ diff --git a/bundle/config/mutator/validate_git_details.go b/bundle/config/mutator/validate_git_details.go new file mode 100644 index 000000000..116498bfc --- /dev/null +++ b/bundle/config/mutator/validate_git_details.go @@ -0,0 +1,29 @@ +package mutator + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" +) + +type validateGitDetails struct{} + +func ValidateGitDetails() *validateGitDetails { + return &validateGitDetails{} +} + +func (m *validateGitDetails) Name() string { + return "ValidateGitDetails" +} + +func (m *validateGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { + if b.Config.Bundle.Git.Branch == "" || b.Config.Bundle.Git.ActualBranch == "" { + return nil + } + + if b.Config.Bundle.Git.Branch != b.Config.Bundle.Git.ActualBranch && !b.Config.Bundle.Force { + return fmt.Errorf("not on the right Git branch:\n expected according to configuration: %s\n actual: %s\nuse --force to override", b.Config.Bundle.Git.Branch, b.Config.Bundle.Git.ActualBranch) + } + return nil +} diff --git a/bundle/config/mutator/validate_git_details_test.go b/bundle/config/mutator/validate_git_details_test.go new file mode 100644 index 000000000..252964eeb --- /dev/null +++ b/bundle/config/mutator/validate_git_details_test.go @@ -0,0 +1,65 @@ +package mutator + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/stretchr/testify/assert" +) + +func TestValidateGitDetailsMatchingBranches(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Git: config.Git{ + Branch: "main", + ActualBranch: "main", + }, + }, + }, + } + + m := ValidateGitDetails() + err := m.Apply(context.Background(), bundle) + + assert.NoError(t, err) +} + +func TestValidateGitDetailsNonMatchingBranches(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Git: config.Git{ + Branch: "main", + ActualBranch: "feature", + }, + }, + }, + } + + m := ValidateGitDetails() + err := m.Apply(context.Background(), bundle) + + expectedError := "not on the right Git branch:\n expected according to configuration: main\n actual: feature\nuse --force to override" + assert.EqualError(t, err, expectedError) +} + +func TestValidateGitDetailsNotUsingGit(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Git: config.Git{ + Branch: "main", + ActualBranch: "", + }, + }, + }, + } + + m := ValidateGitDetails() + err := m.Apply(context.Background(), bundle) + + assert.NoError(t, err) +} diff --git a/bundle/config/resources/pkg.go 
b/bundle/config/paths/paths.go similarity index 95% rename from bundle/config/resources/pkg.go rename to bundle/config/paths/paths.go index 5cf54a06b..c2cbcb7dd 100644 --- a/bundle/config/resources/pkg.go +++ b/bundle/config/paths/paths.go @@ -1,4 +1,4 @@ -package resources +package paths import ( "fmt" diff --git a/bundle/config/resources.go b/bundle/config/resources.go index fc86647eb..c239b510b 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -11,8 +11,9 @@ type Resources struct { Jobs map[string]*resources.Job `json:"jobs,omitempty"` Pipelines map[string]*resources.Pipeline `json:"pipelines,omitempty"` - Models map[string]*resources.MlflowModel `json:"models,omitempty"` - Experiments map[string]*resources.MlflowExperiment `json:"experiments,omitempty"` + Models map[string]*resources.MlflowModel `json:"models,omitempty"` + Experiments map[string]*resources.MlflowExperiment `json:"experiments,omitempty"` + ModelServingEndpoints map[string]*resources.ModelServingEndpoint `json:"model_serving_endpoints,omitempty"` } type UniqueResourceIdTracker struct { @@ -93,6 +94,19 @@ func (r *Resources) VerifyUniqueResourceIdentifiers() (*UniqueResourceIdTracker, tracker.Type[k] = "mlflow_experiment" tracker.ConfigPath[k] = r.Experiments[k].ConfigFilePath } + for k := range r.ModelServingEndpoints { + if _, ok := tracker.Type[k]; ok { + return tracker, fmt.Errorf("multiple resources named %s (%s at %s, %s at %s)", + k, + tracker.Type[k], + tracker.ConfigPath[k], + "model_serving_endpoint", + r.ModelServingEndpoints[k].ConfigFilePath, + ) + } + tracker.Type[k] = "model_serving_endpoint" + tracker.ConfigPath[k] = r.ModelServingEndpoints[k].ConfigFilePath + } return tracker, nil } @@ -112,4 +126,18 @@ func (r *Resources) SetConfigFilePath(path string) { for _, e := range r.Experiments { e.ConfigFilePath = path } + for _, e := range r.ModelServingEndpoints { + e.ConfigFilePath = path + } +} + +// MergeJobClusters iterates over all jobs and merges their job clusters. +// This is called after applying the target overrides. +func (r *Resources) MergeJobClusters() error { + for _, job := range r.Jobs { + if err := job.MergeJobClusters(); err != nil { + return err + } + } + return nil } diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index a1ea3855e..66705afb2 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -1,12 +1,49 @@ package resources -import "github.com/databricks/databricks-sdk-go/service/jobs" +import ( + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/imdario/mergo" +) type Job struct { ID string `json:"id,omitempty" bundle:"readonly"` Permissions []Permission `json:"permissions,omitempty"` - Paths + paths.Paths *jobs.JobSettings } + +// MergeJobClusters merges job clusters with the same key. +// The job clusters field is a slice, and as such, overrides are appended to it. +// We can identify a job cluster by its key, however, so we can use this key +// to figure out which definitions are actually overrides and merge them. +func (j *Job) MergeJobClusters() error { + keys := make(map[string]*jobs.JobCluster) + output := make([]jobs.JobCluster, 0, len(j.JobClusters)) + + // Target overrides are always appended, so we can iterate in natural order to + // first find the base definition, and merge instances we encounter later. 
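+	//
+	// Illustrative example: a base cluster with job_cluster_key "main" that
+	// sets spark_version, followed by a target override with the same key
+	// that only sets num_workers, merges into a single "main" cluster with
+	// both fields populated.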
+ for i := range j.JobClusters { + key := j.JobClusters[i].JobClusterKey + + // Register job cluster with key if not yet seen before. + ref, ok := keys[key] + if !ok { + output = append(output, j.JobClusters[i]) + keys[key] = &j.JobClusters[i] + continue + } + + // Merge this instance into the reference. + err := mergo.Merge(ref, &j.JobClusters[i], mergo.WithOverride, mergo.WithAppendSlice) + if err != nil { + return err + } + } + + // Overwrite resulting slice. + j.JobClusters = output + return nil +} diff --git a/bundle/config/resources/job_test.go b/bundle/config/resources/job_test.go new file mode 100644 index 000000000..2ff3205e0 --- /dev/null +++ b/bundle/config/resources/job_test.go @@ -0,0 +1,57 @@ +package resources + +import ( + "testing" + + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestJobMergeJobClusters(t *testing.T) { + j := &Job{ + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + JobClusterKey: "foo", + NewCluster: &compute.ClusterSpec{ + SparkVersion: "13.3.x-scala2.12", + NodeTypeId: "i3.xlarge", + NumWorkers: 2, + }, + }, + { + JobClusterKey: "bar", + NewCluster: &compute.ClusterSpec{ + SparkVersion: "10.4.x-scala2.12", + }, + }, + { + JobClusterKey: "foo", + NewCluster: &compute.ClusterSpec{ + NodeTypeId: "i3.2xlarge", + NumWorkers: 4, + }, + }, + }, + }, + } + + err := j.MergeJobClusters() + require.NoError(t, err) + + assert.Len(t, j.JobClusters, 2) + assert.Equal(t, "foo", j.JobClusters[0].JobClusterKey) + assert.Equal(t, "bar", j.JobClusters[1].JobClusterKey) + + // This job cluster was merged with a subsequent one. + jc0 := j.JobClusters[0].NewCluster + assert.Equal(t, "13.3.x-scala2.12", jc0.SparkVersion) + assert.Equal(t, "i3.2xlarge", jc0.NodeTypeId) + assert.Equal(t, 4, jc0.NumWorkers) + + // This job cluster was left untouched. 
+ jc1 := j.JobClusters[1].NewCluster + assert.Equal(t, "10.4.x-scala2.12", jc1.SparkVersion) +} diff --git a/bundle/config/resources/mlflow_experiment.go b/bundle/config/resources/mlflow_experiment.go index ebef039a8..d843cf226 100644 --- a/bundle/config/resources/mlflow_experiment.go +++ b/bundle/config/resources/mlflow_experiment.go @@ -1,11 +1,14 @@ package resources -import "github.com/databricks/databricks-sdk-go/service/ml" +import ( + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/ml" +) type MlflowExperiment struct { Permissions []Permission `json:"permissions,omitempty"` - Paths + paths.Paths *ml.Experiment } diff --git a/bundle/config/resources/mlflow_model.go b/bundle/config/resources/mlflow_model.go index 31c72f6b0..92617c95a 100644 --- a/bundle/config/resources/mlflow_model.go +++ b/bundle/config/resources/mlflow_model.go @@ -1,11 +1,14 @@ package resources -import "github.com/databricks/databricks-sdk-go/service/ml" +import ( + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/ml" +) type MlflowModel struct { Permissions []Permission `json:"permissions,omitempty"` - Paths + paths.Paths *ml.Model } diff --git a/bundle/config/resources/model_serving_endpoint.go b/bundle/config/resources/model_serving_endpoint.go new file mode 100644 index 000000000..dccecaa6f --- /dev/null +++ b/bundle/config/resources/model_serving_endpoint.go @@ -0,0 +1,24 @@ +package resources + +import ( + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/serving" +) + +type ModelServingEndpoint struct { + // This represents the input args for terraform, and will get converted + // to a HCL representation for CRUD + *serving.CreateServingEndpoint + + // This represents the id (ie serving_endpoint_id) that can be used + // as a reference in other resources. This value is returned by terraform. + ID string + + // Local path where the bundle is defined. All bundle resources include + // this for interpolation purposes. + paths.Paths + + // This is a resource agnostic implementation of permissions for ACLs. + // Implementation could be different based on the resource type. 
+ Permissions []Permission `json:"permissions,omitempty"` +} diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 96efc2c4f..d3a51c575 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -1,12 +1,15 @@ package resources -import "github.com/databricks/databricks-sdk-go/service/pipelines" +import ( + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/pipelines" +) type Pipeline struct { ID string `json:"id,omitempty" bundle:"readonly"` Permissions []Permission `json:"permissions,omitempty"` - Paths + paths.Paths *pipelines.PipelineSpec } diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 63285bf94..82cb9f454 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -3,6 +3,7 @@ package config import ( "testing" + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" "github.com/stretchr/testify/assert" ) @@ -11,21 +12,21 @@ func TestVerifyUniqueResourceIdentifiers(t *testing.T) { r := Resources{ Jobs: map[string]*resources.Job{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo.yml", }, }, }, Models: map[string]*resources.MlflowModel{ "bar": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "bar.yml", }, }, }, Experiments: map[string]*resources.MlflowExperiment{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo2.yml", }, }, @@ -39,14 +40,14 @@ func TestVerifySafeMerge(t *testing.T) { r := Resources{ Jobs: map[string]*resources.Job{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo.yml", }, }, }, Models: map[string]*resources.MlflowModel{ "bar": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "bar.yml", }, }, @@ -55,7 +56,7 @@ func TestVerifySafeMerge(t *testing.T) { other := Resources{ Pipelines: map[string]*resources.Pipeline{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo2.yml", }, }, @@ -69,14 +70,14 @@ func TestVerifySafeMergeForSameResourceType(t *testing.T) { r := Resources{ Jobs: map[string]*resources.Job{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo.yml", }, }, }, Models: map[string]*resources.MlflowModel{ "bar": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "bar.yml", }, }, @@ -85,7 +86,7 @@ func TestVerifySafeMergeForSameResourceType(t *testing.T) { other := Resources{ Jobs: map[string]*resources.Job{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo2.yml", }, }, diff --git a/bundle/config/root.go b/bundle/config/root.go index 5ee337d30..465d8a62e 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -7,16 +7,44 @@ import ( "strings" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/ghodss/yaml" "github.com/imdario/mergo" ) -// FileName is the name of bundle configuration file. -const FileName = "bundle.yml" +type ConfigFileNames []string + +// FileNames contains allowed names of bundle configuration files. 
+var FileNames = ConfigFileNames{"databricks.yml", "databricks.yaml", "bundle.yml", "bundle.yaml"} + +func (c ConfigFileNames) FindInPath(path string) (string, error) { + result := "" + var firstErr error + + for _, file := range c { + filePath := filepath.Join(path, file) + _, err := os.Stat(filePath) + if err == nil { + if result != "" { + return "", fmt.Errorf("multiple bundle root configuration files found in %s", path) + } + result = filePath + } else { + if firstErr == nil { + firstErr = err + } + } + } + + if result == "" { + return "", firstErr + } + return result, nil +} type Root struct { // Path contains the directory path to the root of the bundle. - // It is set when loading `bundle.yml`. + // It is set when loading `databricks.yml`. Path string `json:"-" bundle:"readonly"` // Contains user defined variables @@ -24,14 +52,11 @@ type Root struct { // Bundle contains details about this bundle, such as its name, // version of the spec (TODO), default cluster, default warehouse, etc. - Bundle Bundle `json:"bundle"` + Bundle Bundle `json:"bundle,omitempty"` // Include specifies a list of patterns of file names to load and - // merge into the this configuration. If not set in `bundle.yml`, - // it defaults to loading `*.yml` and `*/*.yml`. - // - // Also see [mutator.DefineDefaultInclude]. - // + // merge into the this configuration. Only includes defined in the root + // `databricks.yml` are processed. Defaults to an empty list. Include []string `json:"include,omitempty"` // Workspace contains details about the workspace to connect to @@ -39,17 +64,28 @@ type Root struct { Workspace Workspace `json:"workspace,omitempty"` // Artifacts contains a description of all code artifacts in this bundle. - Artifacts map[string]*Artifact `json:"artifacts,omitempty"` + Artifacts Artifacts `json:"artifacts,omitempty"` // Resources contains a description of all Databricks resources // to deploy in this bundle (e.g. jobs, pipelines, etc.). Resources Resources `json:"resources,omitempty"` - // Environments can be used to differentiate settings and resources between - // bundle deployment environments (e.g. development, staging, production). + // Targets can be used to differentiate settings and resources between + // bundle deployment targets (e.g. development, staging, production). // If not specified, the code below initializes this field with a - // single default-initialized environment called "default". - Environments map[string]*Environment `json:"environments,omitempty"` + // single default-initialized target called "default". + Targets map[string]*Target `json:"targets,omitempty"` + + // DEPRECATED. Left for backward compatibility with Targets + Environments map[string]*Target `json:"environments,omitempty"` + + // Sync section specifies options for files synchronization + Sync Sync `json:"sync,omitempty"` + + // RunAs section allows to define an execution identity for jobs and pipelines runs + RunAs *jobs.JobRunAs `json:"run_as,omitempty"` + + Experimental *Experimental `json:"experimental,omitempty"` } func Load(path string) (*Root, error) { @@ -62,7 +98,10 @@ func Load(path string) (*Root, error) { // If we were given a directory, assume this is the bundle root. if stat.IsDir() { - path = filepath.Join(path, FileName) + path, err = FileNames.FindInPath(path) + if err != nil { + return nil, err + } } if err := r.Load(path); err != nil { @@ -76,14 +115,21 @@ func Load(path string) (*Root, error) { // was loaded from in configuration leafs that require it. 
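As a rough usage sketch of the ConfigFileNames.FindInPath helper introduced above: the directory path below is hypothetical and error handling is kept minimal.

package main

import (
    "fmt"

    "github.com/databricks/cli/bundle/config"
)

func main() {
    // Hypothetical bundle root containing exactly one of databricks.yml,
    // databricks.yaml, bundle.yml, or bundle.yaml.
    path, err := config.FileNames.FindInPath("/path/to/project")
    if err != nil {
        // Returned when no configuration file is found, or when more than one is.
        fmt.Println("error:", err)
        return
    }
    fmt.Println("bundle root configuration:", path)
}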
func (r *Root) SetConfigFilePath(path string) { r.Resources.SetConfigFilePath(path) - if r.Environments != nil { - for _, env := range r.Environments { + if r.Artifacts != nil { + r.Artifacts.SetConfigFilePath(path) + } + + if r.Targets != nil { + for _, env := range r.Targets { if env == nil { continue } if env.Resources != nil { env.Resources.SetConfigFilePath(path) } + if env.Artifacts != nil { + env.Artifacts.SetConfigFilePath(path) + } } } } @@ -121,6 +167,15 @@ func (r *Root) Load(path string) error { return fmt.Errorf("failed to load %s: %w", path, err) } + if r.Environments != nil && r.Targets != nil { + return fmt.Errorf("both 'environments' and 'targets' are specified, only 'targets' should be used: %s", path) + } + + if r.Environments != nil { + //TODO: add a command line notice that this is a deprecated option. + r.Targets = r.Environments + } + r.Path = filepath.Dir(path) r.SetConfigFilePath(path) @@ -129,57 +184,68 @@ func (r *Root) Load(path string) error { } func (r *Root) Merge(other *Root) error { + err := r.Sync.Merge(r, other) + if err != nil { + return err + } + other.Sync = Sync{} + // TODO: when hooking into merge semantics, disallow setting path on the target instance. other.Path = "" // Check for safe merge, protecting against duplicate resource identifiers - err := r.Resources.VerifySafeMerge(&other.Resources) + err = r.Resources.VerifySafeMerge(&other.Resources) if err != nil { return err } // TODO: define and test semantics for merging. - return mergo.MergeWithOverwrite(r, other) + return mergo.Merge(r, other, mergo.WithOverride) } -func (r *Root) MergeEnvironment(env *Environment) error { +func (r *Root) MergeTargetOverrides(target *Target) error { var err error - // Environment may be nil if it's empty. - if env == nil { + // Target may be nil if it's empty. 
+ if target == nil { return nil } - if env.Bundle != nil { - err = mergo.MergeWithOverwrite(&r.Bundle, env.Bundle) + if target.Bundle != nil { + err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride) if err != nil { return err } } - if env.Workspace != nil { - err = mergo.MergeWithOverwrite(&r.Workspace, env.Workspace) + if target.Workspace != nil { + err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride) if err != nil { return err } } - if env.Artifacts != nil { - err = mergo.Merge(&r.Artifacts, env.Artifacts, mergo.WithAppendSlice) + if target.Artifacts != nil { + err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) if err != nil { return err } } - if env.Resources != nil { - err = mergo.Merge(&r.Resources, env.Resources, mergo.WithAppendSlice) + if target.Resources != nil { + err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice) + if err != nil { + return err + } + + err = r.Resources.MergeJobClusters() if err != nil { return err } } - if env.Variables != nil { - for k, v := range env.Variables { + if target.Variables != nil { + for k, v := range target.Variables { variable, ok := r.Variables[k] if !ok { return fmt.Errorf("variable %s is not defined but is assigned a value", k) @@ -190,12 +256,28 @@ func (r *Root) MergeEnvironment(env *Environment) error { } } - if env.Mode != "" { - r.Bundle.Mode = env.Mode + if target.RunAs != nil { + r.RunAs = target.RunAs } - if env.ComputeID != "" { - r.Bundle.ComputeID = env.ComputeID + if target.Mode != "" { + r.Bundle.Mode = target.Mode + } + + if target.ComputeID != "" { + r.Bundle.ComputeID = target.ComputeID + } + + git := &r.Bundle.Git + if target.Git.Branch != "" { + git.Branch = target.Git.Branch + git.Inferred = false + } + if target.Git.Commit != "" { + git.Commit = target.Git.Commit + } + if target.Git.OriginURL != "" { + git.OriginURL = target.Git.OriginURL } return nil diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index 818e89a2d..6e2636678 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -2,7 +2,11 @@ package config import ( "encoding/json" + "os" + "path/filepath" "reflect" + "runtime" + "strings" "testing" "github.com/databricks/cli/bundle/config/variable" @@ -26,7 +30,7 @@ func TestRootMarshalUnmarshal(t *testing.T) { func TestRootLoad(t *testing.T) { root := &Root{} - err := root.Load("../tests/basic/bundle.yml") + err := root.Load("../tests/basic/databricks.yml") require.NoError(t, err) assert.Equal(t, "basic", root.Bundle.Name) } @@ -53,7 +57,7 @@ func TestRootMergeStruct(t *testing.T) { func TestRootMergeMap(t *testing.T) { root := &Root{ Path: "path", - Environments: map[string]*Environment{ + Targets: map[string]*Target{ "development": { Workspace: &Workspace{ Host: "foo", @@ -64,7 +68,7 @@ func TestRootMergeMap(t *testing.T) { } other := &Root{ Path: "path", - Environments: map[string]*Environment{ + Targets: map[string]*Target{ "development": { Workspace: &Workspace{ Host: "bar", @@ -73,18 +77,18 @@ func TestRootMergeMap(t *testing.T) { }, } assert.NoError(t, root.Merge(other)) - assert.Equal(t, &Workspace{Host: "bar", Profile: "profile"}, root.Environments["development"].Workspace) + assert.Equal(t, &Workspace{Host: "bar", Profile: "profile"}, root.Targets["development"].Workspace) } func TestDuplicateIdOnLoadReturnsError(t *testing.T) { root := &Root{} - err := root.Load("./testdata/duplicate_resource_names_in_root/bundle.yml") - assert.ErrorContains(t, err, "multiple 
resources named foo (job at ./testdata/duplicate_resource_names_in_root/bundle.yml, pipeline at ./testdata/duplicate_resource_names_in_root/bundle.yml)") + err := root.Load("./testdata/duplicate_resource_names_in_root/databricks.yml") + assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_names_in_root/databricks.yml, pipeline at ./testdata/duplicate_resource_names_in_root/databricks.yml)") } func TestDuplicateIdOnMergeReturnsError(t *testing.T) { root := &Root{} - err := root.Load("./testdata/duplicate_resource_name_in_subconfiguration/bundle.yml") + err := root.Load("./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml") require.NoError(t, err) other := &Root{} @@ -92,7 +96,7 @@ func TestDuplicateIdOnMergeReturnsError(t *testing.T) { require.NoError(t, err) err = root.Merge(other) - assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_name_in_subconfiguration/bundle.yml, pipeline at ./testdata/duplicate_resource_name_in_subconfiguration/resources.yml)") + assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml, pipeline at ./testdata/duplicate_resource_name_in_subconfiguration/resources.yml)") } func TestInitializeVariables(t *testing.T) { @@ -155,11 +159,70 @@ func TestInitializeVariablesUndefinedVariables(t *testing.T) { assert.ErrorContains(t, err, "variable bar has not been defined") } -func TestRootMergeEnvironmentWithMode(t *testing.T) { +func TestRootMergeTargetOverridesWithMode(t *testing.T) { root := &Root{ Bundle: Bundle{}, } - env := &Environment{Mode: Development} - require.NoError(t, root.MergeEnvironment(env)) + env := &Target{Mode: Development} + require.NoError(t, root.MergeTargetOverrides(env)) assert.Equal(t, Development, root.Bundle.Mode) } + +func TestConfigFileNames_FindInPath(t *testing.T) { + testCases := []struct { + name string + files []string + expected string + err string + }{ + { + name: "file found", + files: []string{"databricks.yml"}, + expected: "BASE/databricks.yml", + err: "", + }, + { + name: "file found", + files: []string{"bundle.yml"}, + expected: "BASE/bundle.yml", + err: "", + }, + { + name: "multiple files found", + files: []string{"databricks.yaml", "bundle.yml"}, + expected: "", + err: "multiple bundle root configuration files found", + }, + { + name: "file not found", + files: []string{}, + expected: "", + err: "no such file or directory", + }, + } + + if runtime.GOOS == "windows" { + testCases[3].err = "The system cannot find the file specified." + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + projectDir := t.TempDir() + for _, file := range tc.files { + f1, _ := os.Create(filepath.Join(projectDir, file)) + f1.Close() + } + + result, err := FileNames.FindInPath(projectDir) + + expected := strings.Replace(tc.expected, "BASE/", projectDir+string(os.PathSeparator), 1) + assert.Equal(t, expected, result) + + if tc.err != "" { + assert.ErrorContains(t, err, tc.err) + } else { + assert.NoError(t, err) + } + }) + } +} diff --git a/bundle/config/sync.go b/bundle/config/sync.go new file mode 100644 index 000000000..6ba2603c4 --- /dev/null +++ b/bundle/config/sync.go @@ -0,0 +1,31 @@ +package config + +import "path/filepath" + +type Sync struct { + // Include contains a list of globs evaluated relative to the bundle root path + // to explicitly include files that were excluded by the user's gitignore. 
+ Include []string `json:"include,omitempty"` + + // Exclude contains a list of globs evaluated relative to the bundle root path + // to explicitly exclude files that were included by + // 1) the default that observes the user's gitignore, or + // 2) the `Include` field above. + Exclude []string `json:"exclude,omitempty"` +} + +func (s *Sync) Merge(root *Root, other *Root) error { + path, err := filepath.Rel(root.Path, other.Path) + if err != nil { + return err + } + for _, include := range other.Sync.Include { + s.Include = append(s.Include, filepath.Join(path, include)) + } + + for _, exclude := range other.Sync.Exclude { + s.Exclude = append(s.Exclude, filepath.Join(path, exclude)) + } + + return nil +} diff --git a/bundle/config/target.go b/bundle/config/target.go new file mode 100644 index 000000000..2489efc33 --- /dev/null +++ b/bundle/config/target.go @@ -0,0 +1,50 @@ +package config + +import "github.com/databricks/databricks-sdk-go/service/jobs" + +type Mode string + +// Target defines overrides for a single target. +// This structure is recursively merged into the root configuration. +type Target struct { + // Default marks that this target must be used if one isn't specified + // by the user (through target variable or command line argument). + Default bool `json:"default,omitempty"` + + // Determines the mode of the target. + // For example, 'mode: development' can be used for deployments for + // development purposes. + Mode Mode `json:"mode,omitempty"` + + // Overrides the compute used for jobs and other supported assets. + ComputeID string `json:"compute_id,omitempty"` + + Bundle *Bundle `json:"bundle,omitempty"` + + Workspace *Workspace `json:"workspace,omitempty"` + + Artifacts Artifacts `json:"artifacts,omitempty"` + + Resources *Resources `json:"resources,omitempty"` + + // Override default values for defined variables + // Does not permit defining new variables or redefining existing ones + // in the scope of an target + Variables map[string]string `json:"variables,omitempty"` + + Git Git `json:"git,omitempty"` + + RunAs *jobs.JobRunAs `json:"run_as,omitempty"` +} + +const ( + // Development mode: deployments done purely for running things in development. + // Any deployed resources will be marked as "dev" and might be hidden or cleaned up. + Development Mode = "development" + + // Production mode: deployments done for production purposes. + // Any deployed resources will not be changed but this mode will enable + // various strictness checks to make sure that a deployment is correctly setup + // for production purposes. 
+ Production Mode = "production" +) diff --git a/bundle/config/testdata/duplicate_resource_name_in_subconfiguration/bundle.yml b/bundle/config/testdata/duplicate_resource_name_in_subconfiguration/databricks.yml similarity index 100% rename from bundle/config/testdata/duplicate_resource_name_in_subconfiguration/bundle.yml rename to bundle/config/testdata/duplicate_resource_name_in_subconfiguration/databricks.yml diff --git a/bundle/config/testdata/duplicate_resource_names_in_root/bundle.yml b/bundle/config/testdata/duplicate_resource_names_in_root/databricks.yml similarity index 100% rename from bundle/config/testdata/duplicate_resource_names_in_root/bundle.yml rename to bundle/config/testdata/duplicate_resource_names_in_root/databricks.yml diff --git a/bundle/config/variable/variable.go b/bundle/config/variable/variable.go index 132920bb9..73925d432 100644 --- a/bundle/config/variable/variable.go +++ b/bundle/config/variable/variable.go @@ -18,7 +18,7 @@ type Variable struct { // resolved in the following priority order (from highest to lowest) // // 1. Command line flag. For example: `--var="foo=bar"` - // 2. Environment variable. eg: BUNDLE_VAR_foo=bar + // 2. Target variable. eg: BUNDLE_VAR_foo=bar // 3. Default value as defined in the applicable environments block // 4. Default value defined in variable definition // 5. Throw error, since if no default value is defined, then the variable diff --git a/bundle/config/workspace.go b/bundle/config/workspace.go index 1b6dc4cd5..90cd59c6f 100644 --- a/bundle/config/workspace.go +++ b/bundle/config/workspace.go @@ -21,8 +21,13 @@ type Workspace struct { // // Generic attributes. - Host string `json:"host,omitempty"` - Profile string `json:"profile,omitempty"` + Host string `json:"host,omitempty"` + Profile string `json:"profile,omitempty"` + AuthType string `json:"auth_type,omitempty"` + MetadataServiceURL string `json:"metadata_service_url,omitempty" bundle:"internal"` + + // OAuth specific attributes. + ClientID string `json:"client_id,omitempty"` // Google specific attributes. GoogleServiceAccount string `json:"google_service_account,omitempty"` @@ -37,10 +42,10 @@ type Workspace struct { // CurrentUser holds the current user. // This is set after configuration initialization. - CurrentUser *iam.User `json:"current_user,omitempty" bundle:"readonly"` + CurrentUser *User `json:"current_user,omitempty" bundle:"readonly"` // Remote workspace base path for deployment state, for artifacts, as synchronization target. - // This defaults to "~/.bundle/${bundle.name}/${bundle.environment}" where "~" expands to + // This defaults to "~/.bundle/${bundle.name}/${bundle.target}" where "~" expands to // the current user's home directory in the workspace (e.g. `/Users/jane@doe.com`). RootPath string `json:"root_path,omitempty"` @@ -57,11 +62,23 @@ type Workspace struct { StatePath string `json:"state_path,omitempty"` } +type User struct { + // A short name for the user, based on the user's UserName. 
+ ShortName string `json:"short_name,omitempty" bundle:"readonly"` + + *iam.User +} + func (w *Workspace) Client() (*databricks.WorkspaceClient, error) { cfg := databricks.Config{ // Generic - Host: w.Host, - Profile: w.Profile, + Host: w.Host, + Profile: w.Profile, + AuthType: w.AuthType, + MetadataServiceURL: w.MetadataServiceURL, + + // OAuth + ClientID: w.ClientID, // Google GoogleServiceAccount: w.GoogleServiceAccount, diff --git a/bundle/deploy/files/delete.go b/bundle/deploy/files/delete.go index 1f103bbd0..9f7ad4d41 100644 --- a/bundle/deploy/files/delete.go +++ b/bundle/deploy/files/delete.go @@ -27,7 +27,7 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error { red := color.New(color.FgRed).SprintFunc() if !b.AutoApprove { - proceed, err := cmdio.Ask(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?: ", b.Config.Workspace.RootPath, red("deleted permanently!"))) + proceed, err := cmdio.AskYesOrNo(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?", b.Config.Workspace.RootPath, red("deleted permanently!"))) if err != nil { return err } diff --git a/bundle/deploy/files/sync.go b/bundle/deploy/files/sync.go index 77c64e529..ff3d78d07 100644 --- a/bundle/deploy/files/sync.go +++ b/bundle/deploy/files/sync.go @@ -9,15 +9,24 @@ import ( ) func getSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) { - cacheDir, err := b.CacheDir() + cacheDir, err := b.CacheDir(ctx) if err != nil { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } + includes, err := b.GetSyncIncludePatterns(ctx) + if err != nil { + return nil, fmt.Errorf("cannot get list of sync includes: %w", err) + } + opts := sync.SyncOptions{ LocalPath: b.Config.Path, RemotePath: b.Config.Workspace.FilesPath, - Full: false, + Include: includes, + Exclude: b.Config.Sync.Exclude, + + Full: false, + CurrentUser: b.Config.Workspace.CurrentUser.User, SnapshotBasePath: cacheDir, WorkspaceClient: b.WorkspaceClient(), diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index ac68bd359..7d95e719d 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -53,8 +53,10 @@ func BundleToTerraform(config *config.Root) *schema.Root { tfroot := schema.NewRoot() tfroot.Provider = schema.NewProviders() tfroot.Resource = schema.NewResources() + noResources := true for k, src := range config.Resources.Jobs { + noResources = false var dst schema.ResourceJob conv(src, &dst) @@ -88,6 +90,12 @@ func BundleToTerraform(config *config.Root) *schema.Root { Tag: git.GitTag, } } + + for _, v := range src.Parameters { + var t schema.ResourceJobParameter + conv(v, &t) + dst.Parameter = append(dst.Parameter, t) + } } tfroot.Resource.Job[k] = &dst @@ -100,6 +108,7 @@ func BundleToTerraform(config *config.Root) *schema.Root { } for k, src := range config.Resources.Pipelines { + noResources = false var dst schema.ResourcePipeline conv(src, &dst) @@ -127,6 +136,7 @@ func BundleToTerraform(config *config.Root) *schema.Root { } for k, src := range config.Resources.Models { + noResources = false var dst schema.ResourceMlflowModel conv(src, &dst) tfroot.Resource.MlflowModel[k] = &dst @@ -139,6 +149,7 @@ func BundleToTerraform(config *config.Root) *schema.Root { } for k, src := range config.Resources.Experiments { + noResources = false var dst schema.ResourceMlflowExperiment conv(src, &dst) tfroot.Resource.MlflowExperiment[k] = &dst @@ -150,6 +161,25 @@ func BundleToTerraform(config *config.Root) *schema.Root { } } + for k, src := range 
config.Resources.ModelServingEndpoints { + noResources = false + var dst schema.ResourceModelServing + conv(src, &dst) + tfroot.Resource.ModelServing[k] = &dst + + // Configure permissions for this resource. + if rp := convPermissions(src.Permissions); rp != nil { + rp.ServingEndpointId = fmt.Sprintf("${databricks_model_serving.%s.serving_endpoint_id}", k) + tfroot.Resource.Permissions["model_serving_"+k] = rp + } + } + + // We explicitly set "resource" to nil to omit it from a JSON encoding. + // This is required because the terraform CLI requires >= 1 resources defined + // if the "resource" property is used in a .tf.json file. + if noResources { + tfroot.Resource = nil + } return tfroot } @@ -185,6 +215,12 @@ func TerraformToBundle(state *tfjson.State, config *config.Root) error { cur := config.Resources.Experiments[resource.Name] conv(tmp, &cur) config.Resources.Experiments[resource.Name] = cur + case "databricks_model_serving": + var tmp schema.ResourceModelServing + conv(resource.AttributeValues, &tmp) + cur := config.Resources.ModelServingEndpoints[resource.Name] + conv(tmp, &cur) + config.Resources.ModelServingEndpoints[resource.Name] = cur case "databricks_permissions": // Ignore; no need to pull these back into the configuration. default: diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index 76cec4e79..b6b29f35a 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" "github.com/databricks/databricks-sdk-go/service/pipelines" + "github.com/databricks/databricks-sdk-go/service/serving" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -26,9 +27,19 @@ func TestConvertJob(t *testing.T) { }, }, GitSource: &jobs.GitSource{ - GitProvider: jobs.GitProviderGithub, + GitProvider: jobs.GitProviderGitHub, GitUrl: "https://github.com/foo/bar", }, + Parameters: []jobs.JobParameterDefinition{ + { + Name: "param1", + Default: "default1", + }, + { + Name: "param2", + Default: "default2", + }, + }, }, } @@ -44,6 +55,9 @@ func TestConvertJob(t *testing.T) { assert.Equal(t, "my job", out.Resource.Job["my_job"].Name) assert.Len(t, out.Resource.Job["my_job"].JobCluster, 1) assert.Equal(t, "https://github.com/foo/bar", out.Resource.Job["my_job"].GitSource.Url) + assert.Len(t, out.Resource.Job["my_job"].Parameter, 2) + assert.Equal(t, "param1", out.Resource.Job["my_job"].Parameter[0].Name) + assert.Equal(t, "param2", out.Resource.Job["my_job"].Parameter[1].Name) assert.Nil(t, out.Data) } @@ -279,3 +293,76 @@ func TestConvertExperimentPermissions(t *testing.T) { assert.Equal(t, "CAN_READ", p.PermissionLevel) } + +func TestConvertModelServing(t *testing.T) { + var src = resources.ModelServingEndpoint{ + CreateServingEndpoint: &serving.CreateServingEndpoint{ + Name: "name", + Config: serving.EndpointCoreConfigInput{ + ServedModels: []serving.ServedModelInput{ + { + ModelName: "model_name", + ModelVersion: "1", + ScaleToZeroEnabled: true, + WorkloadSize: "Small", + }, + }, + TrafficConfig: &serving.TrafficConfig{ + Routes: []serving.Route{ + { + ServedModelName: "model_name-1", + TrafficPercentage: 100, + }, + }, + }, + }, + }, + } + + var config = config.Root{ + Resources: config.Resources{ + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ + "my_model_serving_endpoint": &src, + }, + }, + } + + out := BundleToTerraform(&config) + resource 
:= out.Resource.ModelServing["my_model_serving_endpoint"] + assert.Equal(t, "name", resource.Name) + assert.Equal(t, "model_name", resource.Config.ServedModels[0].ModelName) + assert.Equal(t, "1", resource.Config.ServedModels[0].ModelVersion) + assert.Equal(t, true, resource.Config.ServedModels[0].ScaleToZeroEnabled) + assert.Equal(t, "Small", resource.Config.ServedModels[0].WorkloadSize) + assert.Equal(t, "model_name-1", resource.Config.TrafficConfig.Routes[0].ServedModelName) + assert.Equal(t, 100, resource.Config.TrafficConfig.Routes[0].TrafficPercentage) + assert.Nil(t, out.Data) +} + +func TestConvertModelServingPermissions(t *testing.T) { + var src = resources.ModelServingEndpoint{ + Permissions: []resources.Permission{ + { + Level: "CAN_VIEW", + UserName: "jane@doe.com", + }, + }, + } + + var config = config.Root{ + Resources: config.Resources{ + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ + "my_model_serving_endpoint": &src, + }, + }, + } + + out := BundleToTerraform(&config) + assert.NotEmpty(t, out.Resource.Permissions["model_serving_my_model_serving_endpoint"].ServingEndpointId) + assert.Len(t, out.Resource.Permissions["model_serving_my_model_serving_endpoint"].AccessControl, 1) + + p := out.Resource.Permissions["model_serving_my_model_serving_endpoint"].AccessControl[0] + assert.Equal(t, "jane@doe.com", p.UserName) + assert.Equal(t, "CAN_VIEW", p.PermissionLevel) + +} diff --git a/bundle/deploy/terraform/destroy.go b/bundle/deploy/terraform/destroy.go index a71642522..936d8d45a 100644 --- a/bundle/deploy/terraform/destroy.go +++ b/bundle/deploy/terraform/destroy.go @@ -43,7 +43,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { // Ask for confirmation, if needed if !b.Plan.ConfirmApply { red := color.New(color.FgRed).SprintFunc() - b.Plan.ConfirmApply, err = cmdio.Ask(ctx, fmt.Sprintf("This will permanently %s resources! Proceed? [y/n]: ", red("destroy"))) + b.Plan.ConfirmApply, err = cmdio.AskYesOrNo(ctx, fmt.Sprintf("\nThis will permanently %s resources! Proceed?", red("destroy"))) if err != nil { return err } diff --git a/bundle/deploy/terraform/dir.go b/bundle/deploy/terraform/dir.go index 9f83b8da5..b7b086ceb 100644 --- a/bundle/deploy/terraform/dir.go +++ b/bundle/deploy/terraform/dir.go @@ -1,11 +1,13 @@ package terraform import ( + "context" + "github.com/databricks/cli/bundle" ) // Dir returns the Terraform working directory for a given bundle. // The working directory is emphemeral and nested under the bundle's cache directory. 
-func Dir(b *bundle.Bundle) (string, error) { - return b.CacheDir("terraform") +func Dir(ctx context.Context, b *bundle.Bundle) (string, error) { + return b.CacheDir(ctx, "terraform") } diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go index eb3e99d18..aa1dff74e 100644 --- a/bundle/deploy/terraform/init.go +++ b/bundle/deploy/terraform/init.go @@ -8,9 +8,11 @@ import ( "path/filepath" "runtime" "strings" + "time" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/log" "github.com/hashicorp/go-version" "github.com/hashicorp/hc-install/product" @@ -37,7 +39,7 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con return tf.ExecPath, nil } - binDir, err := b.CacheDir("bin") + binDir, err := b.CacheDir(context.Background(), "bin") if err != nil { return "", err } @@ -55,10 +57,11 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con } // Download Terraform to private bin directory. - installer := &releases.LatestVersion{ - Product: product.Terraform, - Constraints: version.MustConstraints(version.NewConstraint("<2.0")), - InstallDir: binDir, + installer := &releases.ExactVersion{ + Product: product.Terraform, + Version: version.Must(version.NewVersion("1.5.5")), + InstallDir: binDir, + Timeout: 1 * time.Minute, } execPath, err = installer.Install(ctx) if err != nil { @@ -71,17 +74,25 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con } // This function inherits some environment variables for Terraform CLI. -func inheritEnvVars(env map[string]string) error { +func inheritEnvVars(ctx context.Context, environ map[string]string) error { // Include $HOME in set of environment variables to pass along. - home, ok := os.LookupEnv("HOME") + home, ok := env.Lookup(ctx, "HOME") if ok { - env["HOME"] = home + environ["HOME"] = home + } + + // Include $PATH in set of environment variables to pass along. + // This is necessary to ensure that our Terraform provider can use the + // same auxiliary programs (e.g. `az`, or `gcloud`) as the CLI. + path, ok := env.Lookup(ctx, "PATH") + if ok { + environ["PATH"] = path } // Include $TF_CLI_CONFIG_FILE to override terraform provider in development. - configFile, ok := os.LookupEnv("TF_CLI_CONFIG_FILE") + configFile, ok := env.Lookup(ctx, "TF_CLI_CONFIG_FILE") if ok { - env["TF_CLI_CONFIG_FILE"] = configFile + environ["TF_CLI_CONFIG_FILE"] = configFile } return nil @@ -95,40 +106,40 @@ func inheritEnvVars(env map[string]string) error { // the CLI and its dependencies do not have access to. 
// // see: os.TempDir for more context -func setTempDirEnvVars(env map[string]string, b *bundle.Bundle) error { +func setTempDirEnvVars(ctx context.Context, environ map[string]string, b *bundle.Bundle) error { switch runtime.GOOS { case "windows": - if v, ok := os.LookupEnv("TMP"); ok { - env["TMP"] = v - } else if v, ok := os.LookupEnv("TEMP"); ok { - env["TEMP"] = v - } else if v, ok := os.LookupEnv("USERPROFILE"); ok { - env["USERPROFILE"] = v + if v, ok := env.Lookup(ctx, "TMP"); ok { + environ["TMP"] = v + } else if v, ok := env.Lookup(ctx, "TEMP"); ok { + environ["TEMP"] = v + } else if v, ok := env.Lookup(ctx, "USERPROFILE"); ok { + environ["USERPROFILE"] = v } else { - tmpDir, err := b.CacheDir("tmp") + tmpDir, err := b.CacheDir(ctx, "tmp") if err != nil { return err } - env["TMP"] = tmpDir + environ["TMP"] = tmpDir } default: // If TMPDIR is not set, we let the process fall back to its default value. - if v, ok := os.LookupEnv("TMPDIR"); ok { - env["TMPDIR"] = v + if v, ok := env.Lookup(ctx, "TMPDIR"); ok { + environ["TMPDIR"] = v } } return nil } // This function passes through all proxy related environment variables. -func setProxyEnvVars(env map[string]string, b *bundle.Bundle) error { +func setProxyEnvVars(ctx context.Context, environ map[string]string, b *bundle.Bundle) error { for _, v := range []string{"http_proxy", "https_proxy", "no_proxy"} { // The case (upper or lower) is notoriously inconsistent for tools on Unix systems. // We therefore try to read both the upper and lower case versions of the variable. for _, v := range []string{strings.ToUpper(v), strings.ToLower(v)} { - if val, ok := os.LookupEnv(v); ok { + if val, ok := env.Lookup(ctx, v); ok { // Only set uppercase version of the variable. - env[strings.ToUpper(v)] = val + environ[strings.ToUpper(v)] = val } } } @@ -147,7 +158,7 @@ func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - workingDir, err := Dir(b) + workingDir, err := Dir(ctx, b) if err != nil { return err } @@ -157,31 +168,31 @@ func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - env, err := b.AuthEnv() + environ, err := b.AuthEnv() if err != nil { return err } - err = inheritEnvVars(env) + err = inheritEnvVars(ctx, environ) if err != nil { return err } // Set the temporary directory environment variables - err = setTempDirEnvVars(env, b) + err = setTempDirEnvVars(ctx, environ, b) if err != nil { return err } // Set the proxy related environment variables - err = setProxyEnvVars(env, b) + err = setProxyEnvVars(ctx, environ, b) if err != nil { return err } // Configure environment variables for auth for Terraform to use. 
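The environment handling in this file amounts to copying an allow-list of variables from the parent process into the map handed to Terraform. A condensed, standalone sketch of that idea follows; it uses plain os.LookupEnv for brevity instead of the context-aware lookup in libs/env, and is not the actual implementation.

package main

import (
    "fmt"
    "os"
    "strings"
)

// passthrough copies HOME, PATH, TF_CLI_CONFIG_FILE and the proxy variables
// (normalized to upper case) into the environment map for the child process.
func passthrough(environ map[string]string) {
    for _, name := range []string{"HOME", "PATH", "TF_CLI_CONFIG_FILE"} {
        if v, ok := os.LookupEnv(name); ok {
            environ[name] = v
        }
    }
    for _, name := range []string{"http_proxy", "https_proxy", "no_proxy"} {
        for _, candidate := range []string{strings.ToUpper(name), name} {
            if v, ok := os.LookupEnv(candidate); ok {
                environ[strings.ToUpper(name)] = v
            }
        }
    }
}

func main() {
    environ := map[string]string{}
    passthrough(environ)
    fmt.Println(environ)
}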
- log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(env), ", ")) - err = tf.SetEnv(env) + log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(environ), ", ")) + err = tf.SetEnv(environ) if err != nil { return err } diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index 79e18170e..001e7a220 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -31,7 +31,7 @@ func TestInitEnvironmentVariables(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", Terraform: &config.Terraform{ ExecPath: "terraform", }, @@ -58,7 +58,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -68,7 +68,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // Assert that we pass through TMPDIR. @@ -86,7 +86,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirNotSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -96,7 +96,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirNotSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // Assert that we don't pass through TMPDIR. @@ -112,7 +112,7 @@ func TestSetTempDirEnvVarsForWindowWithAllTmpDirEnvVarsSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -124,7 +124,7 @@ func TestSetTempDirEnvVarsForWindowWithAllTmpDirEnvVarsSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // assert that we pass through the highest priority env var value @@ -142,7 +142,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileAndTempSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -154,7 +154,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileAndTempSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // assert that we pass through the highest priority env var value @@ -172,7 +172,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -184,7 +184,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // assert that we pass through the user profile @@ -202,7 +202,7 @@ func TestSetTempDirEnvVarsForWindowsWithoutAnyTempDirEnvVarsSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -214,11 +214,11 @@ func 
TestSetTempDirEnvVarsForWindowsWithoutAnyTempDirEnvVarsSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // assert TMP is set to b.CacheDir("tmp") - tmpDir, err := b.CacheDir("tmp") + tmpDir, err := b.CacheDir(context.Background(), "tmp") require.NoError(t, err) assert.Equal(t, map[string]string{ "TMP": tmpDir, @@ -230,7 +230,7 @@ func TestSetProxyEnvVars(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -248,7 +248,7 @@ func TestSetProxyEnvVars(t *testing.T) { // No proxy env vars set. clearEnv() env := make(map[string]string, 0) - err := setProxyEnvVars(env, b) + err := setProxyEnvVars(context.Background(), env, b) require.NoError(t, err) assert.Len(t, env, 0) @@ -258,7 +258,7 @@ func TestSetProxyEnvVars(t *testing.T) { t.Setenv("https_proxy", "foo") t.Setenv("no_proxy", "foo") env = make(map[string]string, 0) - err = setProxyEnvVars(env, b) + err = setProxyEnvVars(context.Background(), env, b) require.NoError(t, err) assert.ElementsMatch(t, []string{"HTTP_PROXY", "HTTPS_PROXY", "NO_PROXY"}, maps.Keys(env)) @@ -268,7 +268,7 @@ func TestSetProxyEnvVars(t *testing.T) { t.Setenv("HTTPS_PROXY", "foo") t.Setenv("NO_PROXY", "foo") env = make(map[string]string, 0) - err = setProxyEnvVars(env, b) + err = setProxyEnvVars(context.Background(), env, b) require.NoError(t, err) assert.ElementsMatch(t, []string{"HTTP_PROXY", "HTTPS_PROXY", "NO_PROXY"}, maps.Keys(env)) } @@ -277,14 +277,16 @@ func TestInheritEnvVars(t *testing.T) { env := map[string]string{} t.Setenv("HOME", "/home/testuser") + t.Setenv("PATH", "/foo:/bar") t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc") - err := inheritEnvVars(env) + err := inheritEnvVars(context.Background(), env) require.NoError(t, err) require.Equal(t, map[string]string{ "HOME": "/home/testuser", + "PATH": "/foo:/bar", "TF_CLI_CONFIG_FILE": "/tmp/config.tfrc", }, env) } diff --git a/bundle/deploy/terraform/interpolate.go b/bundle/deploy/terraform/interpolate.go index dd1dcbb88..ea3c99aa1 100644 --- a/bundle/deploy/terraform/interpolate.go +++ b/bundle/deploy/terraform/interpolate.go @@ -25,6 +25,9 @@ func interpolateTerraformResourceIdentifiers(path string, lookup map[string]stri case "experiments": path = strings.Join(append([]string{"databricks_mlflow_experiment"}, parts[2:]...), interpolation.Delimiter) return fmt.Sprintf("${%s}", path), nil + case "model_serving_endpoints": + path = strings.Join(append([]string{"databricks_model_serving"}, parts[2:]...), interpolation.Delimiter) + return fmt.Sprintf("${%s}", path), nil default: panic("TODO: " + parts[1]) } diff --git a/bundle/deploy/terraform/load_test.go b/bundle/deploy/terraform/load_test.go index c235c08e8..1937ca8a2 100644 --- a/bundle/deploy/terraform/load_test.go +++ b/bundle/deploy/terraform/load_test.go @@ -20,7 +20,7 @@ func TestLoadWithNoState(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", Terraform: &config.Terraform{ ExecPath: "terraform", }, diff --git a/bundle/deploy/terraform/plan.go b/bundle/deploy/terraform/plan.go index ddaafb44f..18dd7b229 100644 --- a/bundle/deploy/terraform/plan.go +++ b/bundle/deploy/terraform/plan.go @@ -84,7 +84,7 @@ func (p *plan) Apply(ctx context.Context, b *bundle.Bundle) error { } // Persist computed plan - tfDir, err := Dir(b) + tfDir, err := Dir(ctx, b) if err 
!= nil { return err } diff --git a/bundle/deploy/terraform/state_pull.go b/bundle/deploy/terraform/state_pull.go index e5a42d89b..6dd12ccfc 100644 --- a/bundle/deploy/terraform/state_pull.go +++ b/bundle/deploy/terraform/state_pull.go @@ -25,7 +25,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - dir, err := Dir(b) + dir, err := Dir(ctx, b) if err != nil { return err } diff --git a/bundle/deploy/terraform/state_push.go b/bundle/deploy/terraform/state_push.go index 0b4c5dbfa..ae1d8b8b3 100644 --- a/bundle/deploy/terraform/state_push.go +++ b/bundle/deploy/terraform/state_push.go @@ -22,7 +22,7 @@ func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - dir, err := Dir(b) + dir, err := Dir(ctx, b) if err != nil { return err } @@ -32,6 +32,7 @@ func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { if err != nil { return err } + defer local.Close() // Upload state file from local cache directory to filer. log.Infof(ctx, "Writing local state file to remote state directory") diff --git a/bundle/deploy/terraform/write.go b/bundle/deploy/terraform/write.go index b40a70531..b53f9069d 100644 --- a/bundle/deploy/terraform/write.go +++ b/bundle/deploy/terraform/write.go @@ -16,7 +16,7 @@ func (w *write) Name() string { } func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error { - dir, err := Dir(b) + dir, err := Dir(ctx, b) if err != nil { return err } diff --git a/bundle/env/env.go b/bundle/env/env.go new file mode 100644 index 000000000..ed2a13c75 --- /dev/null +++ b/bundle/env/env.go @@ -0,0 +1,18 @@ +package env + +import ( + "context" + + envlib "github.com/databricks/cli/libs/env" +) + +// Return the value of the first environment variable that is set. +func get(ctx context.Context, variables []string) (string, bool) { + for _, v := range variables { + value, ok := envlib.Lookup(ctx, v) + if ok { + return value, true + } + } + return "", false +} diff --git a/bundle/env/env_test.go b/bundle/env/env_test.go new file mode 100644 index 000000000..d900242e0 --- /dev/null +++ b/bundle/env/env_test.go @@ -0,0 +1,44 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetWithRealEnvSingleVariable(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("v1", "foo") + + v, ok := get(context.Background(), []string{"v1"}) + require.True(t, ok) + assert.Equal(t, "foo", v) + + // Not set. + v, ok = get(context.Background(), []string{"v2"}) + require.False(t, ok) + assert.Equal(t, "", v) +} + +func TestGetWithRealEnvMultipleVariables(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("v1", "foo") + + for _, vars := range [][]string{ + {"v1", "v2", "v3"}, + {"v2", "v3", "v1"}, + {"v3", "v1", "v2"}, + } { + v, ok := get(context.Background(), vars) + require.True(t, ok) + assert.Equal(t, "foo", v) + } + + // Not set. + v, ok := get(context.Background(), []string{"v2", "v3", "v4"}) + require.False(t, ok) + assert.Equal(t, "", v) +} diff --git a/bundle/env/includes.go b/bundle/env/includes.go new file mode 100644 index 000000000..4ade01877 --- /dev/null +++ b/bundle/env/includes.go @@ -0,0 +1,14 @@ +package env + +import "context" + +// IncludesVariable names the environment variable that holds additional configuration paths to include +// during bundle configuration loading. Also see `bundle/config/mutator/process_root_includes.go`. 
+const IncludesVariable = "DATABRICKS_BUNDLE_INCLUDES" + +// Includes returns the bundle Includes environment variable. +func Includes(ctx context.Context) (string, bool) { + return get(ctx, []string{ + IncludesVariable, + }) +} diff --git a/bundle/env/includes_test.go b/bundle/env/includes_test.go new file mode 100644 index 000000000..d9366a59f --- /dev/null +++ b/bundle/env/includes_test.go @@ -0,0 +1,28 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestIncludes(t *testing.T) { + ctx := context.Background() + + testutil.CleanupEnvironment(t) + + t.Run("set", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_INCLUDES", "foo") + includes, ok := Includes(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", includes) + }) + + t.Run("not set", func(t *testing.T) { + includes, ok := Includes(ctx) + assert.False(t, ok) + assert.Equal(t, "", includes) + }) +} diff --git a/bundle/env/root.go b/bundle/env/root.go new file mode 100644 index 000000000..e3c2a38ad --- /dev/null +++ b/bundle/env/root.go @@ -0,0 +1,16 @@ +package env + +import "context" + +// RootVariable names the environment variable that holds the bundle root path. +const RootVariable = "DATABRICKS_BUNDLE_ROOT" + +// Root returns the bundle root environment variable. +func Root(ctx context.Context) (string, bool) { + return get(ctx, []string{ + RootVariable, + + // Primary variable name for the bundle root until v0.204.0. + "BUNDLE_ROOT", + }) +} diff --git a/bundle/env/root_test.go b/bundle/env/root_test.go new file mode 100644 index 000000000..fc2d6e206 --- /dev/null +++ b/bundle/env/root_test.go @@ -0,0 +1,43 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestRoot(t *testing.T) { + ctx := context.Background() + + testutil.CleanupEnvironment(t) + + t.Run("first", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_ROOT", "foo") + root, ok := Root(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", root) + }) + + t.Run("second", func(t *testing.T) { + t.Setenv("BUNDLE_ROOT", "foo") + root, ok := Root(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", root) + }) + + t.Run("both set", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_ROOT", "first") + t.Setenv("BUNDLE_ROOT", "second") + root, ok := Root(ctx) + assert.True(t, ok) + assert.Equal(t, "first", root) + }) + + t.Run("not set", func(t *testing.T) { + root, ok := Root(ctx) + assert.False(t, ok) + assert.Equal(t, "", root) + }) +} diff --git a/bundle/env/target.go b/bundle/env/target.go new file mode 100644 index 000000000..ac3b48877 --- /dev/null +++ b/bundle/env/target.go @@ -0,0 +1,17 @@ +package env + +import "context" + +// TargetVariable names the environment variable that holds the bundle target to use. +const TargetVariable = "DATABRICKS_BUNDLE_TARGET" + +// Target returns the bundle target environment variable. +func Target(ctx context.Context) (string, bool) { + return get(ctx, []string{ + TargetVariable, + + // Primary variable name for the bundle target until v0.203.2. + // See https://github.com/databricks/cli/pull/670. 
+ "DATABRICKS_BUNDLE_ENV", + }) +} diff --git a/bundle/env/target_test.go b/bundle/env/target_test.go new file mode 100644 index 000000000..0c15bf917 --- /dev/null +++ b/bundle/env/target_test.go @@ -0,0 +1,43 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestTarget(t *testing.T) { + ctx := context.Background() + + testutil.CleanupEnvironment(t) + + t.Run("first", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_TARGET", "foo") + target, ok := Target(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", target) + }) + + t.Run("second", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_ENV", "foo") + target, ok := Target(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", target) + }) + + t.Run("both set", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_TARGET", "first") + t.Setenv("DATABRICKS_BUNDLE_ENV", "second") + target, ok := Target(ctx) + assert.True(t, ok) + assert.Equal(t, "first", target) + }) + + t.Run("not set", func(t *testing.T) { + target, ok := Target(ctx) + assert.False(t, ok) + assert.Equal(t, "", target) + }) +} diff --git a/bundle/env/temp_dir.go b/bundle/env/temp_dir.go new file mode 100644 index 000000000..b91339079 --- /dev/null +++ b/bundle/env/temp_dir.go @@ -0,0 +1,13 @@ +package env + +import "context" + +// TempDirVariable names the environment variable that holds the temporary directory to use. +const TempDirVariable = "DATABRICKS_BUNDLE_TMP" + +// TempDir returns the temporary directory to use. +func TempDir(ctx context.Context) (string, bool) { + return get(ctx, []string{ + TempDirVariable, + }) +} diff --git a/bundle/env/temp_dir_test.go b/bundle/env/temp_dir_test.go new file mode 100644 index 000000000..7659bac6d --- /dev/null +++ b/bundle/env/temp_dir_test.go @@ -0,0 +1,28 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestTempDir(t *testing.T) { + ctx := context.Background() + + testutil.CleanupEnvironment(t) + + t.Run("set", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_TMP", "foo") + tempDir, ok := TempDir(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", tempDir) + }) + + t.Run("not set", func(t *testing.T) { + tempDir, ok := TempDir(ctx) + assert.False(t, ok) + assert.Equal(t, "", tempDir) + }) +} diff --git a/bundle/internal/tf/codegen/.gitignore b/bundle/internal/tf/codegen/.gitignore index d59e6e95f..72f05fc49 100644 --- a/bundle/internal/tf/codegen/.gitignore +++ b/bundle/internal/tf/codegen/.gitignore @@ -1,2 +1,3 @@ /codegen /tmp +/.vscode diff --git a/bundle/internal/tf/codegen/generator/generator.go b/bundle/internal/tf/codegen/generator/generator.go index 2bd78d96f..86d762439 100644 --- a/bundle/internal/tf/codegen/generator/generator.go +++ b/bundle/internal/tf/codegen/generator/generator.go @@ -8,6 +8,7 @@ import ( "strings" "text/template" + schemapkg "github.com/databricks/cli/bundle/internal/tf/codegen/schema" tfjson "github.com/hashicorp/terraform-json" ) @@ -32,6 +33,23 @@ func (c *collection) Generate(path string) error { return tmpl.Execute(f, c) } +type root struct { + OutputFile string + ProviderVersion string +} + +func (r *root) Generate(path string) error { + tmpl := template.Must(template.ParseFiles(fmt.Sprintf("./templates/%s.tmpl", r.OutputFile))) + f, err := os.Create(filepath.Join(path, r.OutputFile)) + if err != nil { + return err + } + + defer f.Close() + + return tmpl.Execute(f, r) +} + func Run(ctx 
context.Context, schema *tfjson.ProviderSchema, path string) error { // Generate types for resources. var resources []*namedBlock @@ -105,5 +123,17 @@ func Run(ctx context.Context, schema *tfjson.ProviderSchema, path string) error } } + // Generate root.go + { + r := &root{ + OutputFile: "root.go", + ProviderVersion: schemapkg.ProviderVersion, + } + err := r.Generate(path) + if err != nil { + return err + } + } + return nil } diff --git a/bundle/internal/tf/codegen/generator/util.go b/bundle/internal/tf/codegen/generator/util.go index 890417d81..6e703a703 100644 --- a/bundle/internal/tf/codegen/generator/util.go +++ b/bundle/internal/tf/codegen/generator/util.go @@ -1,8 +1,9 @@ package generator import ( + "slices" + "golang.org/x/exp/maps" - "golang.org/x/exp/slices" ) // sortKeys returns a sorted copy of the keys in the specified map. diff --git a/bundle/internal/tf/codegen/generator/walker.go b/bundle/internal/tf/codegen/generator/walker.go index 9532e0e4d..2ed044c3d 100644 --- a/bundle/internal/tf/codegen/generator/walker.go +++ b/bundle/internal/tf/codegen/generator/walker.go @@ -4,10 +4,11 @@ import ( "fmt" "strings" + "slices" + tfjson "github.com/hashicorp/terraform-json" "github.com/iancoleman/strcase" "github.com/zclconf/go-cty/cty" - "golang.org/x/exp/slices" ) type field struct { diff --git a/bundle/internal/tf/codegen/schema/generate.go b/bundle/internal/tf/codegen/schema/generate.go index 4d3e2832b..de2d27225 100644 --- a/bundle/internal/tf/codegen/schema/generate.go +++ b/bundle/internal/tf/codegen/schema/generate.go @@ -8,6 +8,7 @@ import ( "os" "path/filepath" + "github.com/hashicorp/go-version" "github.com/hashicorp/hc-install/product" "github.com/hashicorp/hc-install/releases" "github.com/hashicorp/terraform-exec/tfexec" @@ -19,7 +20,7 @@ func (s *Schema) writeTerraformBlock(_ context.Context) error { "required_providers": map[string]interface{}{ "databricks": map[string]interface{}{ "source": "databricks/databricks", - "version": ">= 1.0.0", + "version": ProviderVersion, }, }, }, @@ -40,9 +41,10 @@ func (s *Schema) installTerraform(ctx context.Context) (path string, err error) return } - installer := &releases.LatestVersion{ - InstallDir: installDir, + installer := &releases.ExactVersion{ Product: product.Terraform, + Version: version.Must(version.NewVersion("1.5.5")), + InstallDir: installDir, } installer.SetLogger(log.Default()) diff --git a/bundle/internal/tf/codegen/schema/version.go b/bundle/internal/tf/codegen/schema/version.go new file mode 100644 index 000000000..84456731f --- /dev/null +++ b/bundle/internal/tf/codegen/schema/version.go @@ -0,0 +1,3 @@ +package schema + +const ProviderVersion = "1.23.0" diff --git a/bundle/internal/tf/codegen/templates/root.go.tmpl b/bundle/internal/tf/codegen/templates/root.go.tmpl new file mode 100644 index 000000000..3beb30072 --- /dev/null +++ b/bundle/internal/tf/codegen/templates/root.go.tmpl @@ -0,0 +1,32 @@ +package schema + +type Providers struct { + Databricks *Config `json:"databricks,omitempty"` +} + +func NewProviders() *Providers { + return &Providers{ + Databricks: &Config{}, + } +} + +type Root struct { + Terraform map[string]any `json:"terraform"` + + Provider *Providers `json:"provider,omitempty"` + Data *DataSources `json:"data,omitempty"` + Resource *Resources `json:"resource,omitempty"` +} + +func NewRoot() *Root { + return &Root{ + Terraform: map[string]interface{}{ + "required_providers": map[string]interface{}{ + "databricks": map[string]interface{}{ + "source": "databricks/databricks", + "version": 
"1.23.0", + }, + }, + }, + } +} diff --git a/bundle/internal/tf/schema/data_source_cluster.go b/bundle/internal/tf/schema/data_source_cluster.go index b5017402b..2aa6fb5d2 100644 --- a/bundle/internal/tf/schema/data_source_cluster.go +++ b/bundle/internal/tf/schema/data_source_cluster.go @@ -90,6 +90,7 @@ type DataSourceClusterClusterInfoGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } diff --git a/bundle/internal/tf/schema/data_source_instance_pool.go b/bundle/internal/tf/schema/data_source_instance_pool.go index 498247174..240083d64 100644 --- a/bundle/internal/tf/schema/data_source_instance_pool.go +++ b/bundle/internal/tf/schema/data_source_instance_pool.go @@ -26,6 +26,7 @@ type DataSourceInstancePoolPoolInfoDiskSpec struct { type DataSourceInstancePoolPoolInfoGcpAttributes struct { GcpAvailability string `json:"gcp_availability,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` } type DataSourceInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOption struct { diff --git a/bundle/internal/tf/schema/data_source_job.go b/bundle/internal/tf/schema/data_source_job.go index a633bd3a3..d251dfe5e 100644 --- a/bundle/internal/tf/schema/data_source_job.go +++ b/bundle/internal/tf/schema/data_source_job.go @@ -25,19 +25,37 @@ type DataSourceJobJobSettingsSettingsDbtTask struct { } type DataSourceJobJobSettingsSettingsEmailNotifications struct { - AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type DataSourceJobJobSettingsSettingsGitSourceJobSource struct { + DirtyState string `json:"dirty_state,omitempty"` + ImportFromGitBranch string `json:"import_from_git_branch"` + JobConfigPath string `json:"job_config_path"` } type DataSourceJobJobSettingsSettingsGitSource struct { - Branch string `json:"branch,omitempty"` - Commit string `json:"commit,omitempty"` - Provider string `json:"provider,omitempty"` - Tag string `json:"tag,omitempty"` - Url string `json:"url"` + Branch string `json:"branch,omitempty"` + Commit string `json:"commit,omitempty"` + Provider string `json:"provider,omitempty"` + Tag string `json:"tag,omitempty"` + Url string `json:"url"` + JobSource *DataSourceJobJobSettingsSettingsGitSourceJobSource `json:"job_source,omitempty"` +} + +type DataSourceJobJobSettingsSettingsHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type DataSourceJobJobSettingsSettingsHealth struct { + Rules []DataSourceJobJobSettingsSettingsHealthRules `json:"rules,omitempty"` } type DataSourceJobJobSettingsSettingsJobClusterNewClusterAutoscale struct { @@ 
-106,6 +124,7 @@ type DataSourceJobJobSettingsSettingsJobClusterNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -287,6 +306,7 @@ type DataSourceJobJobSettingsSettingsNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -383,8 +403,14 @@ type DataSourceJobJobSettingsSettingsNotificationSettings struct { NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` } +type DataSourceJobJobSettingsSettingsParameter struct { + Default string `json:"default,omitempty"` + Name string `json:"name,omitempty"` +} + type DataSourceJobJobSettingsSettingsPipelineTask struct { - PipelineId string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` } type DataSourceJobJobSettingsSettingsPythonWheelTask struct { @@ -402,6 +428,11 @@ type DataSourceJobJobSettingsSettingsRunAs struct { UserName string `json:"user_name,omitempty"` } +type DataSourceJobJobSettingsSettingsRunJobTask struct { + JobId string `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + type DataSourceJobJobSettingsSettingsSchedule struct { PauseStatus string `json:"pause_status,omitempty"` QuartzCronExpression string `json:"quartz_cron_expression"` @@ -445,11 +476,22 @@ type DataSourceJobJobSettingsSettingsTaskDependsOn struct { } type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct { - AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskHealth struct { + Rules []DataSourceJobJobSettingsSettingsTaskHealthRules `json:"rules,omitempty"` } type DataSourceJobJobSettingsSettingsTaskLibraryCran struct { @@ -543,6 +585,7 @@ type DataSourceJobJobSettingsSettingsTaskNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } 
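The regenerated job schema picks up the new health rules, run_job_task, job parameters, and on_duration_warning_threshold_exceeded notification lists. A minimal sketch of how the new health structs serialize into the JSON the bundle hands to Terraform, assuming it is compiled inside this repository (the schema package is internal); the metric and threshold values are illustrative, not taken from this change:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/databricks/cli/bundle/internal/tf/schema"
)

func main() {
	// Illustrative only: a duration-based health rule as it appears on the job data source.
	health := schema.DataSourceJobJobSettingsSettingsHealth{
		Rules: []schema.DataSourceJobJobSettingsSettingsHealthRules{
			{Metric: "RUN_DURATION_SECONDS", Op: "GREATER_THAN", Value: 3600},
		},
	}
	out, _ := json.Marshal(health)
	fmt.Println(string(out))
	// {"rules":[{"metric":"RUN_DURATION_SECONDS","op":"GREATER_THAN","value":3600}]}
}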
@@ -634,8 +677,15 @@ type DataSourceJobJobSettingsSettingsTaskNotebookTask struct { Source string `json:"source,omitempty"` } +type DataSourceJobJobSettingsSettingsTaskNotificationSettings struct { + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` +} + type DataSourceJobJobSettingsSettingsTaskPipelineTask struct { - PipelineId string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` } type DataSourceJobJobSettingsSettingsTaskPythonWheelTask struct { @@ -645,6 +695,11 @@ type DataSourceJobJobSettingsSettingsTaskPythonWheelTask struct { Parameters []string `json:"parameters,omitempty"` } +type DataSourceJobJobSettingsSettingsTaskRunJobTask struct { + JobId string `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + type DataSourceJobJobSettingsSettingsTaskSparkJarTask struct { JarUri string `json:"jar_uri,omitempty"` MainClassName string `json:"main_class_name,omitempty"` @@ -702,29 +757,32 @@ type DataSourceJobJobSettingsSettingsTaskSqlTask struct { } type DataSourceJobJobSettingsSettingsTask struct { - ComputeKey string `json:"compute_key,omitempty"` - Description string `json:"description,omitempty"` - ExistingClusterId string `json:"existing_cluster_id,omitempty"` - JobClusterKey string `json:"job_cluster_key,omitempty"` - MaxRetries int `json:"max_retries,omitempty"` - MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` - RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` - RunIf string `json:"run_if,omitempty"` - TaskKey string `json:"task_key,omitempty"` - TimeoutSeconds int `json:"timeout_seconds,omitempty"` - ConditionTask *DataSourceJobJobSettingsSettingsTaskConditionTask `json:"condition_task,omitempty"` - DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"` - DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"` - EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"` - Library []DataSourceJobJobSettingsSettingsTaskLibrary `json:"library,omitempty"` - NewCluster *DataSourceJobJobSettingsSettingsTaskNewCluster `json:"new_cluster,omitempty"` - NotebookTask *DataSourceJobJobSettingsSettingsTaskNotebookTask `json:"notebook_task,omitempty"` - PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"` - PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"` - SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"` - SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"` - SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` - SqlTask *DataSourceJobJobSettingsSettingsTaskSqlTask `json:"sql_task,omitempty"` + ComputeKey string `json:"compute_key,omitempty"` + Description string `json:"description,omitempty"` + ExistingClusterId string `json:"existing_cluster_id,omitempty"` + JobClusterKey string `json:"job_cluster_key,omitempty"` + MaxRetries int `json:"max_retries,omitempty"` + MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` + RunIf string `json:"run_if,omitempty"` + 
TaskKey string `json:"task_key,omitempty"` + TimeoutSeconds int `json:"timeout_seconds,omitempty"` + ConditionTask *DataSourceJobJobSettingsSettingsTaskConditionTask `json:"condition_task,omitempty"` + DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"` + DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"` + EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"` + Health *DataSourceJobJobSettingsSettingsTaskHealth `json:"health,omitempty"` + Library []DataSourceJobJobSettingsSettingsTaskLibrary `json:"library,omitempty"` + NewCluster *DataSourceJobJobSettingsSettingsTaskNewCluster `json:"new_cluster,omitempty"` + NotebookTask *DataSourceJobJobSettingsSettingsTaskNotebookTask `json:"notebook_task,omitempty"` + NotificationSettings *DataSourceJobJobSettingsSettingsTaskNotificationSettings `json:"notification_settings,omitempty"` + PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"` + PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"` + RunJobTask *DataSourceJobJobSettingsSettingsTaskRunJobTask `json:"run_job_task,omitempty"` + SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"` + SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"` + SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` + SqlTask *DataSourceJobJobSettingsSettingsTaskSqlTask `json:"sql_task,omitempty"` } type DataSourceJobJobSettingsSettingsTriggerFileArrival struct { @@ -738,6 +796,10 @@ type DataSourceJobJobSettingsSettingsTrigger struct { FileArrival *DataSourceJobJobSettingsSettingsTriggerFileArrival `json:"file_arrival,omitempty"` } +type DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded struct { + Id string `json:"id"` +} + type DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure struct { Id string `json:"id"` } @@ -751,9 +813,10 @@ type DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess struct { } type DataSourceJobJobSettingsSettingsWebhookNotifications struct { - OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"` - OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"` - OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"` + OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"` + OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } type DataSourceJobJobSettingsSettings struct { @@ -771,15 +834,18 @@ type DataSourceJobJobSettingsSettings struct { DbtTask *DataSourceJobJobSettingsSettingsDbtTask `json:"dbt_task,omitempty"` EmailNotifications *DataSourceJobJobSettingsSettingsEmailNotifications `json:"email_notifications,omitempty"` GitSource *DataSourceJobJobSettingsSettingsGitSource `json:"git_source,omitempty"` + Health *DataSourceJobJobSettingsSettingsHealth 
`json:"health,omitempty"` JobCluster []DataSourceJobJobSettingsSettingsJobCluster `json:"job_cluster,omitempty"` Library []DataSourceJobJobSettingsSettingsLibrary `json:"library,omitempty"` NewCluster *DataSourceJobJobSettingsSettingsNewCluster `json:"new_cluster,omitempty"` NotebookTask *DataSourceJobJobSettingsSettingsNotebookTask `json:"notebook_task,omitempty"` NotificationSettings *DataSourceJobJobSettingsSettingsNotificationSettings `json:"notification_settings,omitempty"` + Parameter []DataSourceJobJobSettingsSettingsParameter `json:"parameter,omitempty"` PipelineTask *DataSourceJobJobSettingsSettingsPipelineTask `json:"pipeline_task,omitempty"` PythonWheelTask *DataSourceJobJobSettingsSettingsPythonWheelTask `json:"python_wheel_task,omitempty"` Queue *DataSourceJobJobSettingsSettingsQueue `json:"queue,omitempty"` RunAs *DataSourceJobJobSettingsSettingsRunAs `json:"run_as,omitempty"` + RunJobTask *DataSourceJobJobSettingsSettingsRunJobTask `json:"run_job_task,omitempty"` Schedule *DataSourceJobJobSettingsSettingsSchedule `json:"schedule,omitempty"` SparkJarTask *DataSourceJobJobSettingsSettingsSparkJarTask `json:"spark_jar_task,omitempty"` SparkPythonTask *DataSourceJobJobSettingsSettingsSparkPythonTask `json:"spark_python_task,omitempty"` diff --git a/bundle/internal/tf/schema/data_source_metastore.go b/bundle/internal/tf/schema/data_source_metastore.go new file mode 100644 index 000000000..dd14be81c --- /dev/null +++ b/bundle/internal/tf/schema/data_source_metastore.go @@ -0,0 +1,30 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. + +package schema + +type DataSourceMetastoreMetastoreInfo struct { + Cloud string `json:"cloud,omitempty"` + CreatedAt int `json:"created_at,omitempty"` + CreatedBy string `json:"created_by,omitempty"` + DefaultDataAccessConfigId string `json:"default_data_access_config_id,omitempty"` + DeltaSharingOrganizationName string `json:"delta_sharing_organization_name,omitempty"` + DeltaSharingRecipientTokenLifetimeInSeconds int `json:"delta_sharing_recipient_token_lifetime_in_seconds,omitempty"` + DeltaSharingScope string `json:"delta_sharing_scope,omitempty"` + GlobalMetastoreId string `json:"global_metastore_id,omitempty"` + MetastoreId string `json:"metastore_id,omitempty"` + Name string `json:"name,omitempty"` + Owner string `json:"owner,omitempty"` + PrivilegeModelVersion string `json:"privilege_model_version,omitempty"` + Region string `json:"region,omitempty"` + StorageRoot string `json:"storage_root,omitempty"` + StorageRootCredentialId string `json:"storage_root_credential_id,omitempty"` + StorageRootCredentialName string `json:"storage_root_credential_name,omitempty"` + UpdatedAt int `json:"updated_at,omitempty"` + UpdatedBy string `json:"updated_by,omitempty"` +} + +type DataSourceMetastore struct { + Id string `json:"id,omitempty"` + MetastoreId string `json:"metastore_id"` + MetastoreInfo *DataSourceMetastoreMetastoreInfo `json:"metastore_info,omitempty"` +} diff --git a/bundle/internal/tf/schema/data_source_metastores.go b/bundle/internal/tf/schema/data_source_metastores.go new file mode 100644 index 000000000..c2b6854ee --- /dev/null +++ b/bundle/internal/tf/schema/data_source_metastores.go @@ -0,0 +1,8 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type DataSourceMetastores struct { + Id string `json:"id,omitempty"` + Ids map[string]string `json:"ids,omitempty"` +} diff --git a/bundle/internal/tf/schema/data_source_sql_warehouse.go b/bundle/internal/tf/schema/data_source_sql_warehouse.go index f90cc9dd6..218591d09 100644 --- a/bundle/internal/tf/schema/data_source_sql_warehouse.go +++ b/bundle/internal/tf/schema/data_source_sql_warehouse.go @@ -29,7 +29,7 @@ type DataSourceSqlWarehouse struct { DataSourceId string `json:"data_source_id,omitempty"` EnablePhoton bool `json:"enable_photon,omitempty"` EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"` - Id string `json:"id"` + Id string `json:"id,omitempty"` InstanceProfileArn string `json:"instance_profile_arn,omitempty"` JdbcUrl string `json:"jdbc_url,omitempty"` MaxNumClusters int `json:"max_num_clusters,omitempty"` diff --git a/bundle/internal/tf/schema/data_sources.go b/bundle/internal/tf/schema/data_sources.go index 6fbcf680b..79658298f 100644 --- a/bundle/internal/tf/schema/data_sources.go +++ b/bundle/internal/tf/schema/data_sources.go @@ -18,6 +18,8 @@ type DataSources struct { InstancePool map[string]*DataSourceInstancePool `json:"databricks_instance_pool,omitempty"` Job map[string]*DataSourceJob `json:"databricks_job,omitempty"` Jobs map[string]*DataSourceJobs `json:"databricks_jobs,omitempty"` + Metastore map[string]*DataSourceMetastore `json:"databricks_metastore,omitempty"` + Metastores map[string]*DataSourceMetastores `json:"databricks_metastores,omitempty"` MwsCredentials map[string]*DataSourceMwsCredentials `json:"databricks_mws_credentials,omitempty"` MwsWorkspaces map[string]*DataSourceMwsWorkspaces `json:"databricks_mws_workspaces,omitempty"` NodeType map[string]*DataSourceNodeType `json:"databricks_node_type,omitempty"` @@ -55,6 +57,8 @@ func NewDataSources() *DataSources { InstancePool: make(map[string]*DataSourceInstancePool), Job: make(map[string]*DataSourceJob), Jobs: make(map[string]*DataSourceJobs), + Metastore: make(map[string]*DataSourceMetastore), + Metastores: make(map[string]*DataSourceMetastores), MwsCredentials: make(map[string]*DataSourceMwsCredentials), MwsWorkspaces: make(map[string]*DataSourceMwsWorkspaces), NodeType: make(map[string]*DataSourceNodeType), diff --git a/bundle/internal/tf/schema/resource_access_control_rule_set.go b/bundle/internal/tf/schema/resource_access_control_rule_set.go new file mode 100644 index 000000000..775c0708b --- /dev/null +++ b/bundle/internal/tf/schema/resource_access_control_rule_set.go @@ -0,0 +1,15 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type ResourceAccessControlRuleSetGrantRules struct { + Principals []string `json:"principals,omitempty"` + Role string `json:"role"` +} + +type ResourceAccessControlRuleSet struct { + Etag string `json:"etag,omitempty"` + Id string `json:"id,omitempty"` + Name string `json:"name"` + GrantRules []ResourceAccessControlRuleSetGrantRules `json:"grant_rules,omitempty"` +} diff --git a/bundle/internal/tf/schema/resource_cluster.go b/bundle/internal/tf/schema/resource_cluster.go index a95b8c13a..bb4e35824 100644 --- a/bundle/internal/tf/schema/resource_cluster.go +++ b/bundle/internal/tf/schema/resource_cluster.go @@ -68,6 +68,7 @@ type ResourceClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } diff --git a/bundle/internal/tf/schema/resource_connection.go b/bundle/internal/tf/schema/resource_connection.go new file mode 100644 index 000000000..a249a5393 --- /dev/null +++ b/bundle/internal/tf/schema/resource_connection.go @@ -0,0 +1,15 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. + +package schema + +type ResourceConnection struct { + Comment string `json:"comment,omitempty"` + ConnectionType string `json:"connection_type"` + Id string `json:"id,omitempty"` + MetastoreId string `json:"metastore_id,omitempty"` + Name string `json:"name"` + Options map[string]string `json:"options"` + Owner string `json:"owner,omitempty"` + Properties map[string]string `json:"properties,omitempty"` + ReadOnly bool `json:"read_only,omitempty"` +} diff --git a/bundle/internal/tf/schema/resource_group.go b/bundle/internal/tf/schema/resource_group.go index 252d20874..7d7860f5d 100644 --- a/bundle/internal/tf/schema/resource_group.go +++ b/bundle/internal/tf/schema/resource_group.go @@ -3,6 +3,7 @@ package schema type ResourceGroup struct { + AclPrincipalId string `json:"acl_principal_id,omitempty"` AllowClusterCreate bool `json:"allow_cluster_create,omitempty"` AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"` DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"` diff --git a/bundle/internal/tf/schema/resource_instance_pool.go b/bundle/internal/tf/schema/resource_instance_pool.go index 2c3221217..f524b3fce 100644 --- a/bundle/internal/tf/schema/resource_instance_pool.go +++ b/bundle/internal/tf/schema/resource_instance_pool.go @@ -26,6 +26,7 @@ type ResourceInstancePoolDiskSpec struct { type ResourceInstancePoolGcpAttributes struct { GcpAvailability string `json:"gcp_availability,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` } type ResourceInstancePoolInstancePoolFleetAttributesFleetOnDemandOption struct { diff --git a/bundle/internal/tf/schema/resource_job.go b/bundle/internal/tf/schema/resource_job.go index e3137ea15..50101400a 100644 --- a/bundle/internal/tf/schema/resource_job.go +++ b/bundle/internal/tf/schema/resource_job.go @@ -25,19 +25,37 @@ type ResourceJobDbtTask struct { } type ResourceJobEmailNotifications struct { - AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string 
`json:"on_success,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type ResourceJobGitSourceJobSource struct { + DirtyState string `json:"dirty_state,omitempty"` + ImportFromGitBranch string `json:"import_from_git_branch"` + JobConfigPath string `json:"job_config_path"` } type ResourceJobGitSource struct { - Branch string `json:"branch,omitempty"` - Commit string `json:"commit,omitempty"` - Provider string `json:"provider,omitempty"` - Tag string `json:"tag,omitempty"` - Url string `json:"url"` + Branch string `json:"branch,omitempty"` + Commit string `json:"commit,omitempty"` + Provider string `json:"provider,omitempty"` + Tag string `json:"tag,omitempty"` + Url string `json:"url"` + JobSource *ResourceJobGitSourceJobSource `json:"job_source,omitempty"` +} + +type ResourceJobHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type ResourceJobHealth struct { + Rules []ResourceJobHealthRules `json:"rules,omitempty"` } type ResourceJobJobClusterNewClusterAutoscale struct { @@ -106,6 +124,7 @@ type ResourceJobJobClusterNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -287,6 +306,7 @@ type ResourceJobNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -383,8 +403,14 @@ type ResourceJobNotificationSettings struct { NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` } +type ResourceJobParameter struct { + Default string `json:"default,omitempty"` + Name string `json:"name,omitempty"` +} + type ResourceJobPipelineTask struct { - PipelineId string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` } type ResourceJobPythonWheelTask struct { @@ -402,6 +428,11 @@ type ResourceJobRunAs struct { UserName string `json:"user_name,omitempty"` } +type ResourceJobRunJobTask struct { + JobId string `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + type ResourceJobSchedule struct { PauseStatus string `json:"pause_status,omitempty"` QuartzCronExpression string `json:"quartz_cron_expression"` @@ -445,11 +476,22 @@ type ResourceJobTaskDependsOn struct { } type ResourceJobTaskEmailNotifications struct { - AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty"` + 
AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type ResourceJobTaskHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type ResourceJobTaskHealth struct { + Rules []ResourceJobTaskHealthRules `json:"rules,omitempty"` } type ResourceJobTaskLibraryCran struct { @@ -543,6 +585,7 @@ type ResourceJobTaskNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -634,8 +677,15 @@ type ResourceJobTaskNotebookTask struct { Source string `json:"source,omitempty"` } +type ResourceJobTaskNotificationSettings struct { + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` +} + type ResourceJobTaskPipelineTask struct { - PipelineId string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` } type ResourceJobTaskPythonWheelTask struct { @@ -645,6 +695,11 @@ type ResourceJobTaskPythonWheelTask struct { Parameters []string `json:"parameters,omitempty"` } +type ResourceJobTaskRunJobTask struct { + JobId string `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + type ResourceJobTaskSparkJarTask struct { JarUri string `json:"jar_uri,omitempty"` MainClassName string `json:"main_class_name,omitempty"` @@ -702,29 +757,32 @@ type ResourceJobTaskSqlTask struct { } type ResourceJobTask struct { - ComputeKey string `json:"compute_key,omitempty"` - Description string `json:"description,omitempty"` - ExistingClusterId string `json:"existing_cluster_id,omitempty"` - JobClusterKey string `json:"job_cluster_key,omitempty"` - MaxRetries int `json:"max_retries,omitempty"` - MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` - RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` - RunIf string `json:"run_if,omitempty"` - TaskKey string `json:"task_key,omitempty"` - TimeoutSeconds int `json:"timeout_seconds,omitempty"` - ConditionTask *ResourceJobTaskConditionTask `json:"condition_task,omitempty"` - DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"` - DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"` - EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"` - Library []ResourceJobTaskLibrary `json:"library,omitempty"` - NewCluster *ResourceJobTaskNewCluster `json:"new_cluster,omitempty"` - NotebookTask *ResourceJobTaskNotebookTask `json:"notebook_task,omitempty"` - PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"` - PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"` - SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"` - SparkPythonTask 
*ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"` - SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` - SqlTask *ResourceJobTaskSqlTask `json:"sql_task,omitempty"` + ComputeKey string `json:"compute_key,omitempty"` + Description string `json:"description,omitempty"` + ExistingClusterId string `json:"existing_cluster_id,omitempty"` + JobClusterKey string `json:"job_cluster_key,omitempty"` + MaxRetries int `json:"max_retries,omitempty"` + MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` + RunIf string `json:"run_if,omitempty"` + TaskKey string `json:"task_key,omitempty"` + TimeoutSeconds int `json:"timeout_seconds,omitempty"` + ConditionTask *ResourceJobTaskConditionTask `json:"condition_task,omitempty"` + DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"` + DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"` + EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"` + Health *ResourceJobTaskHealth `json:"health,omitempty"` + Library []ResourceJobTaskLibrary `json:"library,omitempty"` + NewCluster *ResourceJobTaskNewCluster `json:"new_cluster,omitempty"` + NotebookTask *ResourceJobTaskNotebookTask `json:"notebook_task,omitempty"` + NotificationSettings *ResourceJobTaskNotificationSettings `json:"notification_settings,omitempty"` + PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"` + PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"` + RunJobTask *ResourceJobTaskRunJobTask `json:"run_job_task,omitempty"` + SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"` + SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"` + SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` + SqlTask *ResourceJobTaskSqlTask `json:"sql_task,omitempty"` } type ResourceJobTriggerFileArrival struct { @@ -738,6 +796,10 @@ type ResourceJobTrigger struct { FileArrival *ResourceJobTriggerFileArrival `json:"file_arrival,omitempty"` } +type ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded struct { + Id string `json:"id"` +} + type ResourceJobWebhookNotificationsOnFailure struct { Id string `json:"id"` } @@ -751,13 +813,15 @@ type ResourceJobWebhookNotificationsOnSuccess struct { } type ResourceJobWebhookNotifications struct { - OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"` - OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"` - OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"` + OnDurationWarningThresholdExceeded []ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"` + OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } type ResourceJob struct { AlwaysRunning bool `json:"always_running,omitempty"` + ControlRunState bool `json:"control_run_state,omitempty"` ExistingClusterId string `json:"existing_cluster_id,omitempty"` Format string `json:"format,omitempty"` Id string `json:"id,omitempty"` @@ -774,15 +838,18 @@ type ResourceJob struct { DbtTask *ResourceJobDbtTask `json:"dbt_task,omitempty"` EmailNotifications 
*ResourceJobEmailNotifications `json:"email_notifications,omitempty"` GitSource *ResourceJobGitSource `json:"git_source,omitempty"` + Health *ResourceJobHealth `json:"health,omitempty"` JobCluster []ResourceJobJobCluster `json:"job_cluster,omitempty"` Library []ResourceJobLibrary `json:"library,omitempty"` NewCluster *ResourceJobNewCluster `json:"new_cluster,omitempty"` NotebookTask *ResourceJobNotebookTask `json:"notebook_task,omitempty"` NotificationSettings *ResourceJobNotificationSettings `json:"notification_settings,omitempty"` + Parameter []ResourceJobParameter `json:"parameter,omitempty"` PipelineTask *ResourceJobPipelineTask `json:"pipeline_task,omitempty"` PythonWheelTask *ResourceJobPythonWheelTask `json:"python_wheel_task,omitempty"` Queue *ResourceJobQueue `json:"queue,omitempty"` RunAs *ResourceJobRunAs `json:"run_as,omitempty"` + RunJobTask *ResourceJobRunJobTask `json:"run_job_task,omitempty"` Schedule *ResourceJobSchedule `json:"schedule,omitempty"` SparkJarTask *ResourceJobSparkJarTask `json:"spark_jar_task,omitempty"` SparkPythonTask *ResourceJobSparkPythonTask `json:"spark_python_task,omitempty"` diff --git a/bundle/internal/tf/schema/resource_model_serving.go b/bundle/internal/tf/schema/resource_model_serving.go index b7ff88ccd..cc5c32570 100644 --- a/bundle/internal/tf/schema/resource_model_serving.go +++ b/bundle/internal/tf/schema/resource_model_serving.go @@ -4,6 +4,7 @@ package schema type ResourceModelServingConfigServedModels struct { EnvironmentVars map[string]string `json:"environment_vars,omitempty"` + InstanceProfileArn string `json:"instance_profile_arn,omitempty"` ModelName string `json:"model_name"` ModelVersion string `json:"model_version"` Name string `json:"name,omitempty"` diff --git a/bundle/internal/tf/schema/resource_pipeline.go b/bundle/internal/tf/schema/resource_pipeline.go index 9e7f71b11..5c5de9a7e 100644 --- a/bundle/internal/tf/schema/resource_pipeline.go +++ b/bundle/internal/tf/schema/resource_pipeline.go @@ -47,6 +47,7 @@ type ResourcePipelineClusterClusterLogConf struct { type ResourcePipelineClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` ZoneId string `json:"zone_id,omitempty"` } diff --git a/bundle/internal/tf/schema/resource_service_principal.go b/bundle/internal/tf/schema/resource_service_principal.go index bdbce2278..5e9943a13 100644 --- a/bundle/internal/tf/schema/resource_service_principal.go +++ b/bundle/internal/tf/schema/resource_service_principal.go @@ -3,6 +3,7 @@ package schema type ResourceServicePrincipal struct { + AclPrincipalId string `json:"acl_principal_id,omitempty"` Active bool `json:"active,omitempty"` AllowClusterCreate bool `json:"allow_cluster_create,omitempty"` AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"` diff --git a/bundle/internal/tf/schema/resource_user.go b/bundle/internal/tf/schema/resource_user.go index b96440934..2fe57b8b0 100644 --- a/bundle/internal/tf/schema/resource_user.go +++ b/bundle/internal/tf/schema/resource_user.go @@ -3,6 +3,7 @@ package schema type ResourceUser struct { + AclPrincipalId string `json:"acl_principal_id,omitempty"` Active bool `json:"active,omitempty"` AllowClusterCreate bool `json:"allow_cluster_create,omitempty"` AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"` diff --git a/bundle/internal/tf/schema/resources.go b/bundle/internal/tf/schema/resources.go index 
7a0c2eb8b..c2361254a 100644 --- a/bundle/internal/tf/schema/resources.go +++ b/bundle/internal/tf/schema/resources.go @@ -3,6 +3,7 @@ package schema type Resources struct { + AccessControlRuleSet map[string]*ResourceAccessControlRuleSet `json:"databricks_access_control_rule_set,omitempty"` AwsS3Mount map[string]*ResourceAwsS3Mount `json:"databricks_aws_s3_mount,omitempty"` AzureAdlsGen1Mount map[string]*ResourceAzureAdlsGen1Mount `json:"databricks_azure_adls_gen1_mount,omitempty"` AzureAdlsGen2Mount map[string]*ResourceAzureAdlsGen2Mount `json:"databricks_azure_adls_gen2_mount,omitempty"` @@ -82,6 +83,7 @@ type Resources struct { func NewResources() *Resources { return &Resources{ + AccessControlRuleSet: make(map[string]*ResourceAccessControlRuleSet), AwsS3Mount: make(map[string]*ResourceAwsS3Mount), AzureAdlsGen1Mount: make(map[string]*ResourceAzureAdlsGen1Mount), AzureAdlsGen2Mount: make(map[string]*ResourceAzureAdlsGen2Mount), diff --git a/bundle/internal/tf/schema/root.go b/bundle/internal/tf/schema/root.go index 9cfe8491d..3beb30072 100644 --- a/bundle/internal/tf/schema/root.go +++ b/bundle/internal/tf/schema/root.go @@ -24,7 +24,7 @@ func NewRoot() *Root { "required_providers": map[string]interface{}{ "databricks": map[string]interface{}{ "source": "databricks/databricks", - "version": ">= 1.0.0", + "version": "1.23.0", }, }, }, diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go new file mode 100644 index 000000000..8e2e504c5 --- /dev/null +++ b/bundle/libraries/libraries.go @@ -0,0 +1,198 @@ +package libraries + +import ( + "context" + "fmt" + "net/url" + "path/filepath" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +type match struct { +} + +func MatchWithArtifacts() bundle.Mutator { + return &match{} +} + +func (a *match) Name() string { + return "libraries.MatchWithArtifacts" +} + +func (a *match) Apply(ctx context.Context, b *bundle.Bundle) error { + tasks := findAllTasks(b) + for _, task := range tasks { + if isMissingRequiredLibraries(task) { + return fmt.Errorf("task '%s' is missing required libraries. 
Please include your package code in task libraries block", task.TaskKey) + } + for j := range task.Libraries { + lib := &task.Libraries[j] + err := findArtifactsAndMarkForUpload(ctx, lib, b) + if err != nil { + return err + } + } + } + return nil +} + +func findAllTasks(b *bundle.Bundle) []*jobs.Task { + r := b.Config.Resources + result := make([]*jobs.Task, 0) + for k := range b.Config.Resources.Jobs { + tasks := r.Jobs[k].JobSettings.Tasks + for i := range tasks { + task := &tasks[i] + result = append(result, task) + } + } + + return result +} + +func FindAllWheelTasksWithLocalLibraries(b *bundle.Bundle) []*jobs.Task { + tasks := findAllTasks(b) + wheelTasks := make([]*jobs.Task, 0) + for _, task := range tasks { + if task.PythonWheelTask != nil && IsTaskWithLocalLibraries(task) { + wheelTasks = append(wheelTasks, task) + } + } + + return wheelTasks +} + +func IsTaskWithLocalLibraries(task *jobs.Task) bool { + for _, l := range task.Libraries { + if isLocalLibrary(&l) { + return true + } + } + + return false +} + +func IsTaskWithWorkspaceLibraries(task *jobs.Task) bool { + for _, l := range task.Libraries { + path := libPath(&l) + if isWorkspacePath(path) { + return true + } + } + + return false +} + +func isMissingRequiredLibraries(task *jobs.Task) bool { + if task.Libraries != nil { + return false + } + + return task.PythonWheelTask != nil || task.SparkJarTask != nil +} + +func findLibraryMatches(lib *compute.Library, b *bundle.Bundle) ([]string, error) { + path := libPath(lib) + if path == "" { + return nil, nil + } + + fullPath := filepath.Join(b.Config.Path, path) + return filepath.Glob(fullPath) +} + +func findArtifactsAndMarkForUpload(ctx context.Context, lib *compute.Library, b *bundle.Bundle) error { + matches, err := findLibraryMatches(lib, b) + if err != nil { + return err + } + + if len(matches) == 0 && isLocalLibrary(lib) { + return fmt.Errorf("file %s is referenced in libraries section but doesn't exist on the local file system", libPath(lib)) + } + + for _, match := range matches { + af, err := findArtifactFileByLocalPath(match, b) + if err != nil { + cmdio.LogString(ctx, fmt.Sprintf("%s. Skipping uploading. 
In order to use the define 'artifacts' section", err.Error())) + } else { + af.Libraries = append(af.Libraries, lib) + } + } + + return nil +} + +func findArtifactFileByLocalPath(path string, b *bundle.Bundle) (*config.ArtifactFile, error) { + for _, a := range b.Config.Artifacts { + for k := range a.Files { + if a.Files[k].Source == path { + return &a.Files[k], nil + } + } + } + + return nil, fmt.Errorf("artifact section is not defined for file at %s", path) +} + +func libPath(library *compute.Library) string { + if library.Whl != "" { + return library.Whl + } + if library.Jar != "" { + return library.Jar + } + if library.Egg != "" { + return library.Egg + } + + return "" +} + +func isLocalLibrary(library *compute.Library) bool { + path := libPath(library) + if path == "" { + return false + } + + if isExplicitFileScheme(path) { + return true + } + + if isRemoteStorageScheme(path) { + return false + } + + return !isWorkspacePath(path) +} + +func isExplicitFileScheme(path string) bool { + return strings.HasPrefix(path, "file://") +} + +func isRemoteStorageScheme(path string) bool { + url, err := url.Parse(path) + if err != nil { + return false + } + + if url.Scheme == "" { + return false + } + + // If the path starts with scheme:/ format, it's a correct remote storage scheme + return strings.HasPrefix(path, url.Scheme+":/") + +} + +func isWorkspacePath(path string) bool { + return strings.HasPrefix(path, "/Workspace/") || + strings.HasPrefix(path, "/Users/") || + strings.HasPrefix(path, "/Shared/") +} diff --git a/bundle/libraries/libraries_test.go b/bundle/libraries/libraries_test.go new file mode 100644 index 000000000..7ff1609ab --- /dev/null +++ b/bundle/libraries/libraries_test.go @@ -0,0 +1,31 @@ +package libraries + +import ( + "fmt" + "testing" + + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/stretchr/testify/require" +) + +var testCases map[string]bool = map[string]bool{ + "./some/local/path": true, + "/some/full/path": true, + "/Workspace/path/to/package": false, + "/Users/path/to/package": false, + "file://path/to/package": true, + "C:\\path\\to\\package": true, + "dbfs://path/to/package": false, + "dbfs:/path/to/package": false, + "s3://path/to/package": false, + "abfss://path/to/package": false, +} + +func TestIsLocalLbrary(t *testing.T) { + for p, result := range testCases { + lib := compute.Library{ + Whl: p, + } + require.Equal(t, result, isLocalLibrary(&lib), fmt.Sprintf("isLocalLibrary must return %t for path %s ", result, p)) + } +} diff --git a/bundle/phases/build.go b/bundle/phases/build.go index 9249c32c0..760967fca 100644 --- a/bundle/phases/build.go +++ b/bundle/phases/build.go @@ -3,7 +3,9 @@ package phases import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" + "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/interpolation" + "github.com/databricks/cli/bundle/scripts" ) // The build phase builds artifacts. 
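The new libraries package classifies task library paths: file:// URIs, relative paths, Windows paths, and absolute paths outside /Workspace, /Users, and /Shared count as local; dbfs:/, s3://, abfss://, and workspace paths do not. Local entries must glob to a file under the bundle root and are linked to the artifact that produces them. A rough usage sketch, assuming it is compiled inside this repository; the job layout is made up for illustration:

package main

import (
	"context"
	"log"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/libraries"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/jobs"
)

func main() {
	b := &bundle.Bundle{
		Config: config.Root{
			Path: "/tmp/project", // hypothetical bundle root
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"etl": {JobSettings: &jobs.JobSettings{Tasks: []jobs.Task{{
						TaskKey:         "main",
						PythonWheelTask: &jobs.PythonWheelTask{},
						Libraries:       []compute.Library{{Whl: "./dist/*.whl"}},
					}}}},
				},
			},
		},
	}

	// Errors if ./dist/*.whl matches nothing under the bundle root; matches that are
	// not covered by an artifacts section are reported and skipped for upload.
	if err := bundle.Apply(context.Background(), b, libraries.MatchWithArtifacts()); err != nil {
		log.Println(err)
	}
}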
@@ -11,7 +13,11 @@ func Build() bundle.Mutator { return newPhase( "build", []bundle.Mutator{ + scripts.Execute(config.ScriptPreBuild), + artifacts.DetectPackages(), + artifacts.InferMissingProperties(), artifacts.BuildAll(), + scripts.Execute(config.ScriptPostBuild), interpolation.Interpolate( interpolation.IncludeLookupsInPath("artifacts"), ), diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 116fd38bd..dc57a5174 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -3,19 +3,29 @@ package phases import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/deploy/files" "github.com/databricks/cli/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/bundle/python" + "github.com/databricks/cli/bundle/scripts" ) // The deploy phase deploys artifacts and resources. func Deploy() bundle.Mutator { deployMutator := bundle.Seq( + scripts.Execute(config.ScriptPreDeploy), lock.Acquire(), bundle.Defer( bundle.Seq( - files.Upload(), + mutator.ValidateGitDetails(), + libraries.MatchWithArtifacts(), + artifacts.CleanUp(), artifacts.UploadAll(), + python.TransformWheelTask(), + files.Upload(), terraform.Interpolate(), terraform.Write(), terraform.StatePull(), @@ -25,6 +35,7 @@ func Deploy() bundle.Mutator { ), lock.Release(lock.GoalDeploy), ), + scripts.Execute(config.ScriptPostDeploy), ) return newPhase( diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index fc5056f63..431fe27d4 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -2,10 +2,12 @@ package phases import ( "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/variable" "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/bundle/scripts" ) // The initialize phase fills in defaults and connects to the workspace. 
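The build, deploy, and initialize phases now bracket their mutators with user-defined script hooks and pull in the new run_as, git-details validation, library-matching, and wheel-trampoline mutators. A sketch of how a command typically strings the phases together (not the actual command code; flag handling and error reporting omitted):

package phasesketch

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/phases"
)

// deployBundle applies the standard phase sequence to an already-loaded bundle.
func deployBundle(ctx context.Context, b *bundle.Bundle) error {
	return bundle.Apply(ctx, b, bundle.Seq(
		phases.Initialize(), // defaults, run_as, target mode, post-init scripts
		phases.Build(),      // pre/post-build scripts around artifact detection and builds
		phases.Deploy(),     // lock, artifact and file upload, wheel trampoline, terraform
	))
}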
@@ -16,6 +18,7 @@ func Initialize() bundle.Mutator { "initialize", []bundle.Mutator{ mutator.PopulateCurrentUser(), + mutator.SetRunAs(), mutator.DefineDefaultWorkspaceRoot(), mutator.ExpandWorkspaceRoot(), mutator.DefineDefaultWorkspacePaths(), @@ -26,9 +29,10 @@ func Initialize() bundle.Mutator { interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), ), mutator.OverrideCompute(), - mutator.ProcessEnvironmentMode(), + mutator.ProcessTargetMode(), mutator.TranslatePaths(), terraform.Initialize(), + scripts.Execute(config.ScriptPostInit), }, ) } diff --git a/bundle/python/transform.go b/bundle/python/transform.go new file mode 100644 index 000000000..d8eb33f54 --- /dev/null +++ b/bundle/python/transform.go @@ -0,0 +1,125 @@ +package python + +import ( + "fmt" + "strconv" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +const NOTEBOOK_TEMPLATE = `# Databricks notebook source +%python +{{range .Libraries}} +%pip install --force-reinstall {{.Whl}} +{{end}} + +dbutils.library.restartPython() + +try: + from importlib import metadata +except ImportError: # for Python<3.8 + import subprocess + import sys + + subprocess.check_call([sys.executable, "-m", "pip", "install", "importlib-metadata"]) + import importlib_metadata as metadata + +from contextlib import redirect_stdout +import io +import sys +sys.argv = [{{.Params}}] + +entry = [ep for ep in metadata.distribution("{{.Task.PackageName}}").entry_points if ep.name == "{{.Task.EntryPoint}}"] + +f = io.StringIO() +with redirect_stdout(f): + if entry: + entry[0].load()() + else: + raise ImportError("Entry point '{{.Task.EntryPoint}}' not found") +s = f.getvalue() +dbutils.notebook.exit(s) +` + +// This mutator takes the wheel task and transforms it into notebook +// which installs uploaded wheels using %pip and then calling corresponding +// entry point. +func TransformWheelTask() bundle.Mutator { + return mutator.NewTrampoline( + "python_wheel", + &pythonTrampoline{}, + NOTEBOOK_TEMPLATE, + ) +} + +type pythonTrampoline struct{} + +func (t *pythonTrampoline) CleanUp(task *jobs.Task) error { + task.PythonWheelTask = nil + task.Libraries = nil + + return nil +} + +func (t *pythonTrampoline) GetTasks(b *bundle.Bundle) []mutator.TaskWithJobKey { + r := b.Config.Resources + result := make([]mutator.TaskWithJobKey, 0) + for k := range b.Config.Resources.Jobs { + tasks := r.Jobs[k].JobSettings.Tasks + for i := range tasks { + task := &tasks[i] + + // Keep only Python wheel tasks with workspace libraries referenced. + // At this point of moment we don't have local paths in Libraries sections anymore + // Local paths have been replaced with the remote when the artifacts where uploaded + // in artifacts.UploadAll mutator. 
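+			// In practice that means tasks whose wheels still resolve to local or dbfs:/s3:/abfss:
+			// paths are skipped here, and only tasks with libraries under /Workspace, /Users, or
+			// /Shared are wrapped. The generated notebook rebuilds sys.argv from the wheel task
+			// parameters, e.g. named_parameters {"env": "dev"} yields sys.argv = ["python", "env=dev"].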
+			if task.PythonWheelTask == nil || !needsTrampoline(task) {
+				continue
+			}
+
+			result = append(result, mutator.TaskWithJobKey{
+				JobKey: k,
+				Task:   task,
+			})
+		}
+	}
+	return result
+}
+
+func needsTrampoline(task *jobs.Task) bool {
+	return libraries.IsTaskWithWorkspaceLibraries(task)
+}
+
+func (t *pythonTrampoline) GetTemplateData(task *jobs.Task) (map[string]any, error) {
+	params, err := t.generateParameters(task.PythonWheelTask)
+	if err != nil {
+		return nil, err
+	}
+
+	data := map[string]any{
+		"Libraries": task.Libraries,
+		"Params":    params,
+		"Task":      task.PythonWheelTask,
+	}
+
+	return data, nil
+}
+
+func (t *pythonTrampoline) generateParameters(task *jobs.PythonWheelTask) (string, error) {
+	if task.Parameters != nil && task.NamedParameters != nil {
+		return "", fmt.Errorf("not allowed to pass both parameters and named_parameters")
+	}
+	params := append([]string{"python"}, task.Parameters...)
+	for k, v := range task.NamedParameters {
+		params = append(params, fmt.Sprintf("%s=%s", k, v))
+	}
+
+	for i := range params {
+		params[i] = strconv.Quote(params[i])
+	}
+	return strings.Join(params, ", "), nil
+}
diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go
new file mode 100644
index 000000000..a7448f234
--- /dev/null
+++ b/bundle/python/transform_test.go
@@ -0,0 +1,145 @@
+package python
+
+import (
+	"context"
+	"strings"
+	"testing"
+
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/bundle/config"
+	"github.com/databricks/cli/bundle/config/paths"
+	"github.com/databricks/cli/bundle/config/resources"
+	"github.com/databricks/databricks-sdk-go/service/compute"
+	"github.com/databricks/databricks-sdk-go/service/jobs"
+	"github.com/stretchr/testify/require"
+)
+
+type testCase struct {
+	Actual   []string
+	Expected string
+}
+
+type testCaseNamed struct {
+	Actual   map[string]string
+	Expected string
+}
+
+var paramsTestCases []testCase = []testCase{
+	{[]string{}, `"python"`},
+	{[]string{"a"}, `"python", "a"`},
+	{[]string{"a", "b"}, `"python", "a", "b"`},
+	{[]string{"123!@#$%^&*()-="}, `"python", "123!@#$%^&*()-="`},
+	{[]string{`{"a": 1}`}, `"python", "{\"a\": 1}"`},
+}
+
+var paramsTestCasesNamed []testCaseNamed = []testCaseNamed{
+	{map[string]string{}, `"python"`},
+	{map[string]string{"a": "1"}, `"python", "a=1"`},
+	{map[string]string{"a": "'1'"}, `"python", "a='1'"`},
+	{map[string]string{"a": `"1"`}, `"python", "a=\"1\""`},
+	{map[string]string{"a": "1", "b": "2"}, `"python", "a=1", "b=2"`},
+	{map[string]string{"data": `{"a": 1}`}, `"python", "data={\"a\": 1}"`},
+}
+
+func TestGenerateParameters(t *testing.T) {
+	trampoline := pythonTrampoline{}
+	for _, c := range paramsTestCases {
+		task := &jobs.PythonWheelTask{Parameters: c.Actual}
+		result, err := trampoline.generateParameters(task)
+		require.NoError(t, err)
+		require.Equal(t, c.Expected, result)
+	}
+}
+
+func TestGenerateNamedParameters(t *testing.T) {
+	trampoline := pythonTrampoline{}
+	for _, c := range paramsTestCasesNamed {
+		task := &jobs.PythonWheelTask{NamedParameters: c.Actual}
+		result, err := trampoline.generateParameters(task)
+		require.NoError(t, err)
+
+		// parameter order can be non-deterministic, so just check that they exist as expected
+		require.ElementsMatch(t, strings.Split(c.Expected, ","), strings.Split(result, ","))
+	}
+}
+
+func TestGenerateBoth(t *testing.T) {
+	trampoline := pythonTrampoline{}
+	task := &jobs.PythonWheelTask{NamedParameters: map[string]string{"a": "1"}, Parameters: []string{"b"}}
+	_, err := trampoline.generateParameters(task)
+	require.Error(t, err)
+	require.ErrorContains(t, err, "not allowed to pass both parameters and named_parameters")
+}
+
+func TestTransformFiltersWheelTasksOnly(t *testing.T) {
+	trampoline := pythonTrampoline{}
+	bundle := &bundle.Bundle{
+		Config: config.Root{
+			Resources: config.Resources{
+				Jobs: map[string]*resources.Job{
+					"job1": {
+						JobSettings: &jobs.JobSettings{
+							Tasks: []jobs.Task{
+								{
+									TaskKey:         "key1",
+									PythonWheelTask: &jobs.PythonWheelTask{},
+									Libraries: []compute.Library{
+										{Whl: "/Workspace/Users/test@test.com/bundle/dist/test.whl"},
+									},
+								},
+								{
+									TaskKey:      "key2",
+									NotebookTask: &jobs.NotebookTask{},
+								},
+								{
+									TaskKey:         "key3",
+									PythonWheelTask: &jobs.PythonWheelTask{},
+									Libraries: []compute.Library{
+										{Whl: "dbfs:/FileStore/dist/test.whl"},
+									},
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	tasks := trampoline.GetTasks(bundle)
+	require.Len(t, tasks, 1)
+	require.Equal(t, "job1", tasks[0].JobKey)
+	require.Equal(t, "key1", tasks[0].Task.TaskKey)
+	require.NotNil(t, tasks[0].Task.PythonWheelTask)
+}
+
+func TestNoPanicWithNoPythonWheelTasks(t *testing.T) {
+	tmpDir := t.TempDir()
+	b := &bundle.Bundle{
+		Config: config.Root{
+			Path: tmpDir,
+			Bundle: config.Bundle{
+				Target: "development",
+			},
+			Resources: config.Resources{
+				Jobs: map[string]*resources.Job{
+					"test": {
+						Paths: paths.Paths{
+							ConfigFilePath: tmpDir,
+						},
+						JobSettings: &jobs.JobSettings{
+							Tasks: []jobs.Task{
+								{
+									TaskKey:      "notebook_task",
+									NotebookTask: &jobs.NotebookTask{}},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+	trampoline := TransformWheelTask()
+	err := bundle.Apply(context.Background(), b, trampoline)
+	require.NoError(t, err)
+}
diff --git a/bundle/root.go b/bundle/root.go
index 70d778e15..7518bf5fc 100644
--- a/bundle/root.go
+++ b/bundle/root.go
@@ -1,21 +1,21 @@
 package bundle
 
 import (
+	"context"
 	"fmt"
 	"os"
 
 	"github.com/databricks/cli/bundle/config"
+	"github.com/databricks/cli/bundle/env"
 	"github.com/databricks/cli/folders"
 )
 
-const envBundleRoot = "BUNDLE_ROOT"
-
-// getRootEnv returns the value of the `BUNDLE_ROOT` environment variable
+// getRootEnv returns the value of the bundle root environment variable
 // if it set and is a directory. If the environment variable is set but
 // is not a directory, it returns an error. If the environment variable is
 // not set, it returns an empty string.
-func getRootEnv() (string, error) {
-	path, ok := os.LookupEnv(envBundleRoot)
+func getRootEnv(ctx context.Context) (string, error) {
+	path, ok := env.Root(ctx)
 	if !ok {
 		return "", nil
 	}
@@ -24,7 +24,7 @@ func getRootEnv() (string, error) {
 		err = fmt.Errorf("not a directory")
 	}
 	if err != nil {
-		return "", fmt.Errorf(`invalid bundle root %s="%s": %w`, envBundleRoot, path, err)
+		return "", fmt.Errorf(`invalid bundle root %s="%s": %w`, env.RootVariable, path, err)
 	}
 	return path, nil
 }
@@ -36,16 +36,20 @@ func getRootWithTraversal() (string, error) {
 	if err != nil {
 		return "", err
 	}
-	path, err := folders.FindDirWithLeaf(wd, config.FileName)
-	if err != nil {
-		return "", fmt.Errorf(`unable to locate bundle root: %s not found`, config.FileName)
+
+	for _, file := range config.FileNames {
+		path, err := folders.FindDirWithLeaf(wd, file)
+		if err == nil {
+			return path, nil
+		}
 	}
-	return path, nil
+
+	return "", fmt.Errorf(`unable to locate bundle root: %s not found`, config.FileNames[0])
 }
 
 // mustGetRoot returns a bundle root or an error if one cannot be found.
-func mustGetRoot() (string, error) {
-	path, err := getRootEnv()
+func mustGetRoot(ctx context.Context) (string, error) {
+	path, err := getRootEnv(ctx)
 	if path != "" || err != nil {
 		return path, err
 	}
@@ -53,9 +57,9 @@ func mustGetRoot() (string, error) {
 }
 
 // tryGetRoot returns a bundle root or an empty string if one cannot be found.
-func tryGetRoot() (string, error) {
+func tryGetRoot(ctx context.Context) (string, error) {
 	// Note: an invalid value in the environment variable is still an error.
-	path, err := getRootEnv()
+	path, err := getRootEnv(ctx)
 	if path != "" || err != nil {
 		return path, err
 	}
diff --git a/bundle/root_test.go b/bundle/root_test.go
index dab002256..88113546c 100644
--- a/bundle/root_test.go
+++ b/bundle/root_test.go
@@ -1,11 +1,14 @@
 package bundle
 
 import (
+	"context"
 	"os"
 	"path/filepath"
 	"testing"
 
 	"github.com/databricks/cli/bundle/config"
+	"github.com/databricks/cli/bundle/env"
+
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -30,54 +33,60 @@ func chdir(t *testing.T, dir string) string {
 }
 
 func TestRootFromEnv(t *testing.T) {
+	ctx := context.Background()
 	dir := t.TempDir()
-	t.Setenv(envBundleRoot, dir)
+	t.Setenv(env.RootVariable, dir)
 
 	// It should pull the root from the environment variable.
-	root, err := mustGetRoot()
+	root, err := mustGetRoot(ctx)
 	require.NoError(t, err)
 	require.Equal(t, root, dir)
 }
 
 func TestRootFromEnvDoesntExist(t *testing.T) {
+	ctx := context.Background()
 	dir := t.TempDir()
-	t.Setenv(envBundleRoot, filepath.Join(dir, "doesntexist"))
+	t.Setenv(env.RootVariable, filepath.Join(dir, "doesntexist"))
 
 	// It should pull the root from the environment variable.
-	_, err := mustGetRoot()
+	_, err := mustGetRoot(ctx)
 	require.Errorf(t, err, "invalid bundle root")
 }
 
 func TestRootFromEnvIsFile(t *testing.T) {
+	ctx := context.Background()
 	dir := t.TempDir()
 	f, err := os.Create(filepath.Join(dir, "invalid"))
 	require.NoError(t, err)
 	f.Close()
-	t.Setenv(envBundleRoot, f.Name())
+	t.Setenv(env.RootVariable, f.Name())
 
 	// It should pull the root from the environment variable.
-	_, err = mustGetRoot()
+	_, err = mustGetRoot(ctx)
 	require.Errorf(t, err, "invalid bundle root")
 }
 
 func TestRootIfEnvIsEmpty(t *testing.T) {
+	ctx := context.Background()
 	dir := ""
-	t.Setenv(envBundleRoot, dir)
+	t.Setenv(env.RootVariable, dir)
 
 	// It should pull the root from the environment variable.
-	_, err := mustGetRoot()
+	_, err := mustGetRoot(ctx)
 	require.Errorf(t, err, "invalid bundle root")
 }
 
 func TestRootLookup(t *testing.T) {
+	ctx := context.Background()
+
 	// Have to set then unset to allow the testing package to revert it to its original value.
-	t.Setenv(envBundleRoot, "")
-	os.Unsetenv(envBundleRoot)
+	t.Setenv(env.RootVariable, "")
+	os.Unsetenv(env.RootVariable)
 
 	chdir(t, t.TempDir())
 
-	// Create bundle.yml file.
-	f, err := os.Create(config.FileName)
+	// Create databricks.yml file.
+	f, err := os.Create(config.FileNames[0])
 	require.NoError(t, err)
 	defer f.Close()
@@ -87,18 +96,66 @@ func TestRootLookup(t *testing.T) {
 	// It should find the project root from $PWD.
 	wd := chdir(t, "./a/b/c")
 
-	root, err := mustGetRoot()
+	root, err := mustGetRoot(ctx)
 	require.NoError(t, err)
 	require.Equal(t, wd, root)
 }
 
 func TestRootLookupError(t *testing.T) {
+	ctx := context.Background()
+
 	// Have to set then unset to allow the testing package to revert it to its original value.
-	t.Setenv(envBundleRoot, "")
-	os.Unsetenv(envBundleRoot)
+	t.Setenv(env.RootVariable, "")
+	os.Unsetenv(env.RootVariable)
 
 	// It can't find a project root from a temporary directory.
 	_ = chdir(t, t.TempDir())
 
-	_, err := mustGetRoot()
+	_, err := mustGetRoot(ctx)
 	require.ErrorContains(t, err, "unable to locate bundle root")
 }
+
+func TestLoadYamlWhenIncludesEnvPresent(t *testing.T) {
+	ctx := context.Background()
+	chdir(t, filepath.Join(".", "tests", "basic"))
+	t.Setenv(env.IncludesVariable, "test")
+
+	bundle, err := MustLoad(ctx)
+	assert.NoError(t, err)
+	assert.Equal(t, "basic", bundle.Config.Bundle.Name)
+
+	cwd, err := os.Getwd()
+	assert.NoError(t, err)
+	assert.Equal(t, cwd, bundle.Config.Path)
+}
+
+func TestLoadDefaultBundleWhenNoYamlAndRootAndIncludesEnvPresent(t *testing.T) {
+	ctx := context.Background()
+	dir := t.TempDir()
+	chdir(t, dir)
+	t.Setenv(env.RootVariable, dir)
+	t.Setenv(env.IncludesVariable, "test")
+
+	bundle, err := MustLoad(ctx)
+	assert.NoError(t, err)
+	assert.Equal(t, dir, bundle.Config.Path)
+}
+
+func TestErrorIfNoYamlNoRootEnvAndIncludesEnvPresent(t *testing.T) {
+	ctx := context.Background()
+	dir := t.TempDir()
+	chdir(t, dir)
+	t.Setenv(env.IncludesVariable, "test")
+
+	_, err := MustLoad(ctx)
+	assert.Error(t, err)
+}
+
+func TestErrorIfNoYamlNoIncludesEnvAndRootEnvPresent(t *testing.T) {
+	ctx := context.Background()
+	dir := t.TempDir()
+	chdir(t, dir)
+	t.Setenv(env.RootVariable, dir)
+
+	_, err := MustLoad(ctx)
+	assert.Error(t, err)
+}
diff --git a/bundle/run/job.go b/bundle/run/job.go
index f152a17d0..319cd1464 100644
--- a/bundle/run/job.go
+++ b/bundle/run/job.go
@@ -95,6 +95,13 @@ type jobRunner struct {
 	job *resources.Job
 }
 
+func (r *jobRunner) Name() string {
+	if r.job == nil || r.job.JobSettings == nil {
+		return ""
+	}
+	return r.job.JobSettings.Name
+}
+
 func isFailed(task jobs.RunTask) bool {
 	return task.State.LifeCycleState == jobs.RunLifeCycleStateInternalError ||
 		(task.State.LifeCycleState == jobs.RunLifeCycleStateTerminated &&
diff --git a/bundle/run/keys.go b/bundle/run/keys.go
index c8b7a2b5b..76ec50ac8 100644
--- a/bundle/run/keys.go
+++ b/bundle/run/keys.go
@@ -4,6 +4,7 @@ import (
 	"fmt"
 
 	"github.com/databricks/cli/bundle"
+	"golang.org/x/exp/maps"
 )
 
 // RunnerLookup maps identifiers to a list of workloads that match that identifier.
@@ -32,18 +33,20 @@ func ResourceKeys(b *bundle.Bundle) (keyOnly RunnerLookup, keyWithType RunnerLoo
 	return
 }
 
-// ResourceCompletions returns a list of keys that unambiguously reference resources in the bundle.
-func ResourceCompletions(b *bundle.Bundle) []string {
-	seen := make(map[string]bool)
-	comps := []string{}
+// ResourceCompletionMap returns a map of resource keys to their respective names.
+func ResourceCompletionMap(b *bundle.Bundle) map[string]string {
+	out := make(map[string]string)
 	keyOnly, keyWithType := ResourceKeys(b)
 
+	// Keep track of resources we have seen by their fully qualified key.
+	seen := make(map[string]bool)
+
 	// First add resources that can be identified by key alone.
 	for k, v := range keyOnly {
 		// Invariant: len(v) >= 1. See [ResourceKeys].
 		if len(v) == 1 {
 			seen[v[0].Key()] = true
-			comps = append(comps, k)
+			out[k] = v[0].Name()
 		}
 	}
 
@@ -54,8 +57,13 @@ func ResourceCompletions(b *bundle.Bundle) []string {
 		if ok {
 			continue
 		}
-		comps = append(comps, k)
+		out[k] = v[0].Name()
 	}
-	return comps
+	return out
+}
+
+// ResourceCompletions returns a list of keys that unambiguously reference resources in the bundle.
+func ResourceCompletions(b *bundle.Bundle) []string { + return maps.Keys(ResourceCompletionMap(b)) } diff --git a/bundle/run/output/job.go b/bundle/run/output/job.go index 4bea4c7ad..6199ac2f7 100644 --- a/bundle/run/output/job.go +++ b/bundle/run/output/job.go @@ -60,7 +60,7 @@ func GetJobOutput(ctx context.Context, w *databricks.WorkspaceClient, runId int6 return nil, err } result := &JobOutput{ - TaskOutputs: make([]TaskOutput, len(jobRun.Tasks)), + TaskOutputs: make([]TaskOutput, 0), } for _, task := range jobRun.Tasks { jobRunOutput, err := w.Jobs.GetRunOutput(ctx, jobs.GetRunOutputRequest{ @@ -69,7 +69,11 @@ func GetJobOutput(ctx context.Context, w *databricks.WorkspaceClient, runId int6 if err != nil { return nil, err } - task := TaskOutput{TaskKey: task.TaskKey, Output: toRunOutput(jobRunOutput), EndTime: task.EndTime} + out := toRunOutput(jobRunOutput) + if out == nil { + continue + } + task := TaskOutput{TaskKey: task.TaskKey, Output: out, EndTime: task.EndTime} result.TaskOutputs = append(result.TaskOutputs, task) } return result, nil diff --git a/bundle/run/pipeline.go b/bundle/run/pipeline.go index 7b82c3eae..216712d30 100644 --- a/bundle/run/pipeline.go +++ b/bundle/run/pipeline.go @@ -136,6 +136,13 @@ type pipelineRunner struct { pipeline *resources.Pipeline } +func (r *pipelineRunner) Name() string { + if r.pipeline == nil || r.pipeline.PipelineSpec == nil { + return "" + } + return r.pipeline.PipelineSpec.Name +} + func (r *pipelineRunner) Run(ctx context.Context, opts *Options) (output.RunOutput, error) { var pipelineID = r.pipeline.ID diff --git a/bundle/run/runner.go b/bundle/run/runner.go index 227e12d97..7d3c2c297 100644 --- a/bundle/run/runner.go +++ b/bundle/run/runner.go @@ -21,6 +21,9 @@ type Runner interface { // This is used for showing the user hints w.r.t. disambiguation. Key() string + // Name returns the resource's name, if defined. + Name() string + // Run the underlying worklow. Run(ctx context.Context, opts *Options) (output.RunOutput, error) } diff --git a/bundle/schema/README.md b/bundle/schema/README.md index 4df43cf23..fe6b149c1 100644 --- a/bundle/schema/README.md +++ b/bundle/schema/README.md @@ -3,7 +3,7 @@ `docs/bundle_descriptions.json` contains both autogenerated as well as manually written descriptions for the json schema. Specifically 1. `resources` : almost all descriptions are autogenerated from the OpenAPI spec -2. `environments` : almost all descriptions are copied over from root level entities (eg: `bundle`, `artifacts`) +2. `targets` : almost all descriptions are copied over from root level entities (eg: `bundle`, `artifacts`) 3. `bundle` : manually editted 4. `include` : manually editted 5. `workspace` : manually editted @@ -17,7 +17,7 @@ These descriptions are rendered in the inline documentation in an IDE `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` 2. Manually edit bundle_descriptions.json to add your descriptions 3. Build again to embed the new `bundle_descriptions.json` into the binary (`go build`) -4. Again run `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `environments` +4. Again run `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `targets` 5. 
push to repo diff --git a/bundle/schema/docs.go b/bundle/schema/docs.go index 13a4549d0..4b2fd36ae 100644 --- a/bundle/schema/docs.go +++ b/bundle/schema/docs.go @@ -8,6 +8,7 @@ import ( "reflect" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/jsonschema" "github.com/databricks/databricks-sdk-go/openapi" ) @@ -39,7 +40,7 @@ func BundleDocs(openapiSpecPath string) (*Docs, error) { } openapiReader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } resourcesDocs, err := openapiReader.ResourcesDocs() if err != nil { @@ -51,20 +52,20 @@ func BundleDocs(openapiSpecPath string) (*Docs, error) { } docs.Properties["resources"] = schemaToDocs(resourceSchema) } - docs.refreshEnvironmentsDocs() + docs.refreshTargetsDocs() return docs, nil } -func (docs *Docs) refreshEnvironmentsDocs() error { - environmentsDocs, ok := docs.Properties["environments"] - if !ok || environmentsDocs.AdditionalProperties == nil || - environmentsDocs.AdditionalProperties.Properties == nil { - return fmt.Errorf("invalid environments descriptions") +func (docs *Docs) refreshTargetsDocs() error { + targetsDocs, ok := docs.Properties["targets"] + if !ok || targetsDocs.AdditionalProperties == nil || + targetsDocs.AdditionalProperties.Properties == nil { + return fmt.Errorf("invalid targets descriptions") } - environmentProperties := environmentsDocs.AdditionalProperties.Properties + targetProperties := targetsDocs.AdditionalProperties.Properties propertiesToCopy := []string{"artifacts", "bundle", "resources", "workspace"} for _, p := range propertiesToCopy { - environmentProperties[p] = docs.Properties[p] + targetProperties[p] = docs.Properties[p] } return nil } @@ -88,22 +89,22 @@ func initializeBundleDocs() (*Docs, error) { } // *Docs are a subset of *Schema, this function selects that subset -func schemaToDocs(schema *Schema) *Docs { +func schemaToDocs(jsonSchema *jsonschema.Schema) *Docs { // terminate recursion if schema is nil - if schema == nil { + if jsonSchema == nil { return nil } docs := &Docs{ - Description: schema.Description, + Description: jsonSchema.Description, } - if len(schema.Properties) > 0 { + if len(jsonSchema.Properties) > 0 { docs.Properties = make(map[string]*Docs) } - for k, v := range schema.Properties { + for k, v := range jsonSchema.Properties { docs.Properties[k] = schemaToDocs(v) } - docs.Items = schemaToDocs(schema.Items) - if additionalProperties, ok := schema.AdditionalProperties.(*Schema); ok { + docs.Items = schemaToDocs(jsonSchema.Items) + if additionalProperties, ok := jsonSchema.AdditionalProperties.(*jsonschema.Schema); ok { docs.AdditionalProperties = schemaToDocs(additionalProperties) } return docs diff --git a/bundle/schema/docs/bundle_descriptions.json b/bundle/schema/docs/bundle_descriptions.json index 7734614ec..98f3cf8d0 100644 --- a/bundle/schema/docs/bundle_descriptions.json +++ b/bundle/schema/docs/bundle_descriptions.json @@ -36,7 +36,7 @@ } } }, - "environments": { + "targets": { "description": "", "additionalproperties": { "description": "", @@ -1441,6 +1441,86 @@ } } }, + "model_serving_endpoints": { + "description": "List of Model Serving Endpoints", + "additionalproperties": { + "description": "", + "properties": { + "name": { + "description": "The name of the model serving endpoint. This field is required and must be unique across a workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. 
NOTE: Changing this name will delete the existing endpoint and create a new endpoint with the update name." + }, + "permissions": { + "description": "", + "items": { + "description": "", + "properties": { + "group_name": { + "description": "" + }, + "level": { + "description": "" + }, + "service_principal_name": { + "description": "" + }, + "user_name": { + "description": "" + } + } + } + }, + "config": { + "description": "The model serving endpoint configuration.", + "properties": { + "properties": { + "served_models": { + "description": "Each block represents a served model for the endpoint to serve. A model serving endpoint can have up to 10 served models.", + "items": { + "description": "", + "properties" : { + "name": { + "description": "The name of a served model. It must be unique across an endpoint. If not specified, this field will default to modelname-modelversion. A served model name can consist of alphanumeric characters, dashes, and underscores." + }, + "model_name": { + "description": "The name of the model in Databricks Model Registry to be served." + }, + "model_version": { + "description": "The version of the model in Databricks Model Registry to be served." + }, + "workload_size": { + "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency)." + }, + "scale_to_zero_enabled": { + "description": "Whether the compute resources for the served model should scale down to zero. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0." + } + } + } + }, + "traffic_config": { + "description": "A single block represents the traffic split configuration amongst the served models.", + "properties": { + "routes": { + "description": "Each block represents a route that defines traffic to each served model. Each served_models block needs to have a corresponding routes block.", + "items": { + "description": "", + "properties": { + "served_model_name": { + "description": "The name of the served model this route configures traffic for. This needs to match the name of a served_models block." + }, + "traffic_percentage": { + "description": "The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive." + } + } + } + } + } + } + } + } + } + } + } + }, "pipelines": { "description": "List of DLT pipelines", "additionalproperties": { @@ -1788,8 +1868,11 @@ "artifact_path": { "description": "The remote path to synchronize build artifacts to. This defaults to `${workspace.root}/artifacts`" }, + "auth_type": { + "description": "When multiple auth attributes are available in the environment, use the auth type specified by this argument" + }, "azure_client_id": { - "description": "" + "description": "Azure Client ID." }, "azure_environment": { "description": "Azure environment, one of (Public, UsGov, China, Germany)." @@ -1798,28 +1881,33 @@ "description": "Azure Login Application ID." }, "azure_tenant_id": { - "description": "" + "description": "Azure Tenant ID." }, "azure_use_msi": { - "description": "" + "description": "Whether to use Managed Service Identity (MSI) to authenticate to Azure Databricks." 
}, "azure_workspace_resource_id": { "description": "Azure Resource Manager ID for Azure Databricks workspace." }, + "client_id": { + "description": "OAath client ID for the Databricks workspace." + }, "file_path": { "description": "The remote path to synchronize local files artifacts to. This defaults to `${workspace.root}/files`" }, "google_service_account": { - "description": "" }, "host": { "description": "Host url of the workspace." }, + "metadata_service_url": { + "description": "The URL of the metadata service to use for authentication." + }, "profile": { "description": "Connection profile to use. By default profiles are specified in ~/.databrickscfg." }, "root_path": { - "description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.environment}`" + "description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.target}`" }, "state_path": { "description": "The remote path to synchronize bundle state to. This defaults to `${workspace.root}/state`" @@ -1830,7 +1918,7 @@ } }, "include": { - "description": "A list of patterns of file names to load and merge into the this configuration. It defaults to loading `*.yml` and `*/*.yml`.", + "description": "A list of glob patterns of files to load and merge into the this configuration. Defaults to no files being included.", "items": { "description": "" } @@ -3583,7 +3671,7 @@ "description": "Connection profile to use. By default profiles are specified in ~/.databrickscfg." }, "root_path": { - "description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.environment}`" + "description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.target}`" }, "state_path": { "description": "The remote path to synchronize bundle state to. 
This defaults to `${workspace.root}/state`" @@ -3591,4 +3679,4 @@ } } } -} \ No newline at end of file +} diff --git a/bundle/schema/docs_test.go b/bundle/schema/docs_test.go index 84d804b07..83ee681b0 100644 --- a/bundle/schema/docs_test.go +++ b/bundle/schema/docs_test.go @@ -4,30 +4,31 @@ import ( "encoding/json" "testing" + "github.com/databricks/cli/libs/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestSchemaToDocs(t *testing.T) { - schema := &Schema{ + jsonSchema := &jsonschema.Schema{ Type: "object", Description: "root doc", - Properties: map[string]*Schema{ + Properties: map[string]*jsonschema.Schema{ "foo": {Type: "number", Description: "foo doc"}, "bar": {Type: "string"}, "octave": { Type: "object", - AdditionalProperties: &Schema{Type: "number"}, + AdditionalProperties: &jsonschema.Schema{Type: "number"}, Description: "octave docs", }, "scales": { Type: "object", Description: "scale docs", - Items: &Schema{Type: "string"}, + Items: &jsonschema.Schema{Type: "string"}, }, }, } - docs := schemaToDocs(schema) + docs := schemaToDocs(jsonSchema) docsJson, err := json.MarshalIndent(docs, " ", " ") require.NoError(t, err) diff --git a/bundle/schema/openapi.go b/bundle/schema/openapi.go index 9b4b27dd9..1a8b76ed9 100644 --- a/bundle/schema/openapi.go +++ b/bundle/schema/openapi.go @@ -5,17 +5,18 @@ import ( "fmt" "strings" + "github.com/databricks/cli/libs/jsonschema" "github.com/databricks/databricks-sdk-go/openapi" ) type OpenapiReader struct { OpenapiSpec *openapi.Specification - Memo map[string]*Schema + Memo map[string]*jsonschema.Schema } const SchemaPathPrefix = "#/components/schemas/" -func (reader *OpenapiReader) readOpenapiSchema(path string) (*Schema, error) { +func (reader *OpenapiReader) readOpenapiSchema(path string) (*jsonschema.Schema, error) { schemaKey := strings.TrimPrefix(path, SchemaPathPrefix) // return early if we already have a computed schema @@ -35,7 +36,7 @@ func (reader *OpenapiReader) readOpenapiSchema(path string) (*Schema, error) { if err != nil { return nil, err } - jsonSchema := &Schema{} + jsonSchema := &jsonschema.Schema{} err = json.Unmarshal(bytes, jsonSchema) if err != nil { return nil, err @@ -50,7 +51,7 @@ func (reader *OpenapiReader) readOpenapiSchema(path string) (*Schema, error) { if err != nil { return nil, err } - additionalProperties := &Schema{} + additionalProperties := &jsonschema.Schema{} err = json.Unmarshal(b, additionalProperties) if err != nil { return nil, err @@ -65,7 +66,7 @@ func (reader *OpenapiReader) readOpenapiSchema(path string) (*Schema, error) { } // safe againt loops in refs -func (reader *OpenapiReader) safeResolveRefs(root *Schema, tracker *tracker) (*Schema, error) { +func (reader *OpenapiReader) safeResolveRefs(root *jsonschema.Schema, tracker *tracker) (*jsonschema.Schema, error) { if root.Reference == nil { return reader.traverseSchema(root, tracker) } @@ -100,9 +101,9 @@ func (reader *OpenapiReader) safeResolveRefs(root *Schema, tracker *tracker) (*S return root, err } -func (reader *OpenapiReader) traverseSchema(root *Schema, tracker *tracker) (*Schema, error) { +func (reader *OpenapiReader) traverseSchema(root *jsonschema.Schema, tracker *tracker) (*jsonschema.Schema, error) { // case primitive (or invalid) - if root.Type != Object && root.Type != Array { + if root.Type != jsonschema.ObjectType && root.Type != jsonschema.ArrayType { return root, nil } // only root references are resolved @@ -128,9 +129,9 @@ func (reader *OpenapiReader) traverseSchema(root *Schema, 
tracker *tracker) (*Sc root.Items = itemsSchema } // case map - additionionalProperties, ok := root.AdditionalProperties.(*Schema) - if ok && additionionalProperties != nil { - valueSchema, err := reader.safeResolveRefs(additionionalProperties, tracker) + additionalProperties, ok := root.AdditionalProperties.(*jsonschema.Schema) + if ok && additionalProperties != nil { + valueSchema, err := reader.safeResolveRefs(additionalProperties, tracker) if err != nil { return nil, err } @@ -139,7 +140,7 @@ func (reader *OpenapiReader) traverseSchema(root *Schema, tracker *tracker) (*Sc return root, nil } -func (reader *OpenapiReader) readResolvedSchema(path string) (*Schema, error) { +func (reader *OpenapiReader) readResolvedSchema(path string) (*jsonschema.Schema, error) { root, err := reader.readOpenapiSchema(path) if err != nil { return nil, err @@ -209,6 +210,19 @@ func (reader *OpenapiReader) modelsDocs() (*Docs, error) { return modelsDocs, nil } +func (reader *OpenapiReader) modelServingEndpointsDocs() (*Docs, error) { + modelServingEndpointsSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "serving.CreateServingEndpoint") + if err != nil { + return nil, err + } + modelServingEndpointsDocs := schemaToDocs(modelServingEndpointsSpecSchema) + modelServingEndpointsAllDocs := &Docs{ + Description: "List of Model Serving Endpoints", + AdditionalProperties: modelServingEndpointsDocs, + } + return modelServingEndpointsAllDocs, nil +} + func (reader *OpenapiReader) ResourcesDocs() (*Docs, error) { jobsDocs, err := reader.jobsDocs() if err != nil { @@ -226,14 +240,19 @@ func (reader *OpenapiReader) ResourcesDocs() (*Docs, error) { if err != nil { return nil, err } + modelServingEndpointsDocs, err := reader.modelServingEndpointsDocs() + if err != nil { + return nil, err + } return &Docs{ Description: "Collection of Databricks resources to deploy.", Properties: map[string]*Docs{ - "jobs": jobsDocs, - "pipelines": pipelinesDocs, - "experiments": experimentsDocs, - "models": modelsDocs, + "jobs": jobsDocs, + "pipelines": pipelinesDocs, + "experiments": experimentsDocs, + "models": modelsDocs, + "model_serving_endpoints": modelServingEndpointsDocs, }, }, nil } diff --git a/bundle/schema/openapi_test.go b/bundle/schema/openapi_test.go index 282fac8df..0d71fa440 100644 --- a/bundle/schema/openapi_test.go +++ b/bundle/schema/openapi_test.go @@ -4,6 +4,7 @@ import ( "encoding/json" "testing" + "github.com/databricks/cli/libs/jsonschema" "github.com/databricks/databricks-sdk-go/openapi" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -47,7 +48,7 @@ func TestReadSchemaForObject(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -105,7 +106,7 @@ func TestReadSchemaForArray(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -151,7 +152,7 @@ func TestReadSchemaForMap(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -200,7 +201,7 @@ func TestRootReferenceIsResolved(t *testing.T) { spec 
:= &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -250,7 +251,7 @@ func TestSelfReferenceLoopErrors(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -284,7 +285,7 @@ func TestCrossReferenceLoopErrors(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -329,7 +330,7 @@ func TestReferenceResolutionForMapInObject(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -399,7 +400,7 @@ func TestReferenceResolutionForArrayInObject(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) diff --git a/bundle/schema/schema.go b/bundle/schema/schema.go index 7a55cbd2b..00dd27192 100644 --- a/bundle/schema/schema.go +++ b/bundle/schema/schema.go @@ -5,40 +5,17 @@ import ( "fmt" "reflect" "strings" + + "github.com/databricks/cli/libs/jsonschema" ) -// defines schema for a json object -type Schema struct { - // Type of the object - Type JavascriptType `json:"type,omitempty"` +// Fields tagged "readonly" should not be emitted in the schema as they are +// computed at runtime, and should not be assigned a value by the bundle author. +const readonlyTag = "readonly" - // Description of the object. This is rendered as inline documentation in the - // IDE. This is manually injected here using schema.Docs - Description string `json:"description,omitempty"` - - // Schemas for the fields of an struct. The keys are the first json tag. - // The values are the schema for the type of the field - Properties map[string]*Schema `json:"properties,omitempty"` - - // The schema for all values of an array - Items *Schema `json:"items,omitempty"` - - // The schema for any properties not mentioned in the Schema.Properties field. - // this validates maps[string]any in bundle configuration - // OR - // A boolean type with value false. Setting false here validates that all - // properties in the config have been defined in the json schema as properties - // - // Its type during runtime will either be *Schema or bool - AdditionalProperties any `json:"additionalProperties,omitempty"` - - // Required properties for the object. Any fields missing the "omitempty" - // json tag will be included - Required []string `json:"required,omitempty"` - - // URI to a json schema - Reference *string `json:"$ref,omitempty"` -} +// Annotation for internal bundle fields that should not be exposed to customers. +// Fields can be tagged as "internal" to remove them from the generated schema. +const internalTag = "internal" // This function translates golang types into json schema. 
Here is the mapping // between json schema types and golang types @@ -61,7 +38,7 @@ type Schema struct { // // - []MyStruct -> {type: object, properties: {}, additionalProperties: false} // for details visit: https://json-schema.org/understanding-json-schema/reference/object.html#properties -func New(golangType reflect.Type, docs *Docs) (*Schema, error) { +func New(golangType reflect.Type, docs *Docs) (*jsonschema.Schema, error) { tracker := newTracker() schema, err := safeToSchema(golangType, docs, "", tracker) if err != nil { @@ -70,39 +47,28 @@ func New(golangType reflect.Type, docs *Docs) (*Schema, error) { return schema, nil } -type JavascriptType string - -const ( - Invalid JavascriptType = "invalid" - Boolean JavascriptType = "boolean" - String JavascriptType = "string" - Number JavascriptType = "number" - Object JavascriptType = "object" - Array JavascriptType = "array" -) - -func javascriptType(golangType reflect.Type) (JavascriptType, error) { +func jsonSchemaType(golangType reflect.Type) (jsonschema.Type, error) { switch golangType.Kind() { case reflect.Bool: - return Boolean, nil + return jsonschema.BooleanType, nil case reflect.String: - return String, nil + return jsonschema.StringType, nil case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64: - return Number, nil + return jsonschema.NumberType, nil case reflect.Struct: - return Object, nil + return jsonschema.ObjectType, nil case reflect.Map: if golangType.Key().Kind() != reflect.String { - return Invalid, fmt.Errorf("only strings map keys are valid. key type: %v", golangType.Key().Kind()) + return jsonschema.InvalidType, fmt.Errorf("only strings map keys are valid. key type: %v", golangType.Key().Kind()) } - return Object, nil + return jsonschema.ObjectType, nil case reflect.Array, reflect.Slice: - return Array, nil + return jsonschema.ArrayType, nil default: - return Invalid, fmt.Errorf("unhandled golang type: %s", golangType) + return jsonschema.InvalidType, fmt.Errorf("unhandled golang type: %s", golangType) } } @@ -121,7 +87,7 @@ func javascriptType(golangType reflect.Type) (JavascriptType, error) { // like array, map or no json tags // // - tracker: Keeps track of types / traceIds seen during recursive traversal -func safeToSchema(golangType reflect.Type, docs *Docs, traceId string, tracker *tracker) (*Schema, error) { +func safeToSchema(golangType reflect.Type, docs *Docs, traceId string, tracker *tracker) (*jsonschema.Schema, error) { // WE ERROR OUT IF THERE ARE CYCLES IN THE JSON SCHEMA // There are mechanisms to deal with cycles though recursive identifiers in json // schema. 
However if we use them, we would need to make sure we are able to detect @@ -174,29 +140,29 @@ func getStructFields(golangType reflect.Type) []reflect.StructField { return fields } -func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, error) { +func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*jsonschema.Schema, error) { // *Struct and Struct generate identical json schemas if golangType.Kind() == reflect.Pointer { return safeToSchema(golangType.Elem(), docs, "", tracker) } if golangType.Kind() == reflect.Interface { - return &Schema{}, nil + return &jsonschema.Schema{}, nil } - rootJavascriptType, err := javascriptType(golangType) + rootJavascriptType, err := jsonSchemaType(golangType) if err != nil { return nil, err } - schema := &Schema{Type: rootJavascriptType} + jsonSchema := &jsonschema.Schema{Type: rootJavascriptType} if docs != nil { - schema.Description = docs.Description + jsonSchema.Description = docs.Description } // case array/slice if golangType.Kind() == reflect.Array || golangType.Kind() == reflect.Slice { elemGolangType := golangType.Elem() - elemJavascriptType, err := javascriptType(elemGolangType) + elemJavascriptType, err := jsonSchemaType(elemGolangType) if err != nil { return nil, err } @@ -208,7 +174,7 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, e if err != nil { return nil, err } - schema.Items = &Schema{ + jsonSchema.Items = &jsonschema.Schema{ Type: elemJavascriptType, Properties: elemProps.Properties, AdditionalProperties: elemProps.AdditionalProperties, @@ -226,7 +192,7 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, e if docs != nil { childDocs = docs.AdditionalProperties } - schema.AdditionalProperties, err = safeToSchema(golangType.Elem(), childDocs, "", tracker) + jsonSchema.AdditionalProperties, err = safeToSchema(golangType.Elem(), childDocs, "", tracker) if err != nil { return nil, err } @@ -235,11 +201,11 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, e // case struct if golangType.Kind() == reflect.Struct { children := getStructFields(golangType) - properties := map[string]*Schema{} + properties := map[string]*jsonschema.Schema{} required := []string{} for _, child := range children { bundleTag := child.Tag.Get("bundle") - if bundleTag == "readonly" { + if bundleTag == readonlyTag || bundleTag == internalTag { continue } @@ -281,10 +247,10 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, e properties[childName] = fieldProps } - schema.AdditionalProperties = false - schema.Properties = properties - schema.Required = required + jsonSchema.AdditionalProperties = false + jsonSchema.Properties = properties + jsonSchema.Required = required } - return schema, nil + return jsonSchema, nil } diff --git a/bundle/schema/schema_test.go b/bundle/schema/schema_test.go index 66baf8736..d44a2082a 100644 --- a/bundle/schema/schema_test.go +++ b/bundle/schema/schema_test.go @@ -1462,3 +1462,55 @@ func TestBundleReadOnlytag(t *testing.T) { t.Log("[DEBUG] expected: ", expected) assert.Equal(t, expected, string(jsonSchema)) } + +func TestBundleInternalTag(t *testing.T) { + type Pokemon struct { + Pikachu string `json:"pikachu" bundle:"internal"` + Raichu string `json:"raichu"` + } + + type Foo struct { + Pokemon *Pokemon `json:"pokemon"` + Apple int `json:"apple"` + Mango string `json:"mango" bundle:"internal"` + } + + elem := Foo{} + + schema, err := New(reflect.TypeOf(elem), nil) + 
assert.NoError(t, err) + + jsonSchema, err := json.MarshalIndent(schema, " ", " ") + assert.NoError(t, err) + + expected := + `{ + "type": "object", + "properties": { + "apple": { + "type": "number" + }, + "pokemon": { + "type": "object", + "properties": { + "raichu": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "raichu" + ] + } + }, + "additionalProperties": false, + "required": [ + "pokemon", + "apple" + ] + }` + + t.Log("[DEBUG] actual: ", string(jsonSchema)) + t.Log("[DEBUG] expected: ", expected) + assert.Equal(t, expected, string(jsonSchema)) +} diff --git a/bundle/scripts/scripts.go b/bundle/scripts/scripts.go new file mode 100644 index 000000000..1a8a471ca --- /dev/null +++ b/bundle/scripts/scripts.go @@ -0,0 +1,91 @@ +package scripts + +import ( + "bufio" + "context" + "fmt" + "io" + "os/exec" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/log" +) + +func Execute(hook config.ScriptHook) bundle.Mutator { + return &script{ + scriptHook: hook, + } +} + +type script struct { + scriptHook config.ScriptHook +} + +func (m *script) Name() string { + return fmt.Sprintf("scripts.%s", m.scriptHook) +} + +func (m *script) Apply(ctx context.Context, b *bundle.Bundle) error { + cmd, out, err := executeHook(ctx, b, m.scriptHook) + if err != nil { + return err + } + if cmd == nil { + log.Debugf(ctx, "No script defined for %s, skipping", m.scriptHook) + return nil + } + + cmdio.LogString(ctx, fmt.Sprintf("Executing '%s' script", m.scriptHook)) + + reader := bufio.NewReader(out) + line, err := reader.ReadString('\n') + for err == nil { + cmdio.LogString(ctx, strings.TrimSpace(line)) + line, err = reader.ReadString('\n') + } + + return cmd.Wait() +} + +func executeHook(ctx context.Context, b *bundle.Bundle, hook config.ScriptHook) (*exec.Cmd, io.Reader, error) { + command := getCommmand(b, hook) + if command == "" { + return nil, nil, nil + } + + interpreter, err := findInterpreter() + if err != nil { + return nil, nil, err + } + + cmd := exec.CommandContext(ctx, interpreter, "-c", string(command)) + cmd.Dir = b.Config.Path + + outPipe, err := cmd.StdoutPipe() + if err != nil { + return nil, nil, err + } + + errPipe, err := cmd.StderrPipe() + if err != nil { + return nil, nil, err + } + + return cmd, io.MultiReader(outPipe, errPipe), cmd.Start() +} + +func getCommmand(b *bundle.Bundle, hook config.ScriptHook) config.Command { + if b.Config.Experimental == nil || b.Config.Experimental.Scripts == nil { + return "" + } + + return b.Config.Experimental.Scripts[hook] +} + +func findInterpreter() (string, error) { + // At the moment we just return 'sh' on all platforms and use it to execute scripts + return "sh", nil +} diff --git a/bundle/scripts/scripts_test.go b/bundle/scripts/scripts_test.go new file mode 100644 index 000000000..8b7aa0d1b --- /dev/null +++ b/bundle/scripts/scripts_test.go @@ -0,0 +1,32 @@ +package scripts + +import ( + "bufio" + "context" + "strings" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/stretchr/testify/require" +) + +func TestExecutesHook(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Experimental: &config.Experimental{ + Scripts: map[config.ScriptHook]config.Command{ + config.ScriptPreBuild: "echo 'Hello'", + }, + }, + }, + } + _, out, err := executeHook(context.Background(), b, config.ScriptPreBuild) + require.NoError(t, err) + + reader 
:= bufio.NewReader(out) + line, err := reader.ReadString('\n') + + require.NoError(t, err) + require.Equal(t, "Hello", strings.TrimSpace(line)) +} diff --git a/bundle/tests/autoload_git/bundle.yml b/bundle/tests/autoload_git/bundle.yml deleted file mode 100644 index d0e1de60f..000000000 --- a/bundle/tests/autoload_git/bundle.yml +++ /dev/null @@ -1,4 +0,0 @@ -bundle: - name: autoload git config test - git: - branch: foo diff --git a/bundle/tests/autoload_git/databricks.yml b/bundle/tests/autoload_git/databricks.yml new file mode 100644 index 000000000..92ab8d66a --- /dev/null +++ b/bundle/tests/autoload_git/databricks.yml @@ -0,0 +1,11 @@ +bundle: + name: autoload git config test + +targets: + development: + default: true + + production: + # production can only be deployed from the 'main' branch + git: + branch: main diff --git a/bundle/tests/autoload_git_test.go b/bundle/tests/autoload_git_test.go deleted file mode 100644 index 87c7180e7..000000000 --- a/bundle/tests/autoload_git_test.go +++ /dev/null @@ -1,15 +0,0 @@ -package config_tests - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGitConfig(t *testing.T) { - b := load(t, "./autoload_git") - assert.Equal(t, "foo", b.Config.Bundle.Git.Branch) - sshUrl := "git@github.com:databricks/cli.git" - httpsUrl := "https://github.com/databricks/cli" - assert.Contains(t, []string{sshUrl, httpsUrl}, b.Config.Bundle.Git.OriginURL) -} diff --git a/bundle/tests/basic/bundle.yml b/bundle/tests/basic/databricks.yml similarity index 100% rename from bundle/tests/basic/bundle.yml rename to bundle/tests/basic/databricks.yml diff --git a/bundle/tests/bundle/python_wheel/.gitignore b/bundle/tests/bundle/python_wheel/.gitignore new file mode 100644 index 000000000..f03e23bc2 --- /dev/null +++ b/bundle/tests/bundle/python_wheel/.gitignore @@ -0,0 +1,3 @@ +build/ +*.egg-info +.databricks diff --git a/bundle/tests/bundle/python_wheel/bundle.yml b/bundle/tests/bundle/python_wheel/bundle.yml new file mode 100644 index 000000000..c82ff83f7 --- /dev/null +++ b/bundle/tests/bundle/python_wheel/bundle.yml @@ -0,0 +1,21 @@ +bundle: + name: python-wheel + +artifacts: + my_test_code: + type: whl + path: "./my_test_code" + build: "python3 setup.py bdist_wheel" + +resources: + jobs: + test_job: + name: "[${bundle.environment}] My Wheel Job" + tasks: + - task_key: TestTask + existing_cluster_id: "0717-132531-5opeqon1" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" + libraries: + - whl: ./my_test_code/dist/*.whl diff --git a/bundle/tests/bundle/python_wheel/my_test_code/setup.py b/bundle/tests/bundle/python_wheel/my_test_code/setup.py new file mode 100644 index 000000000..0bd871dd3 --- /dev/null +++ b/bundle/tests/bundle/python_wheel/my_test_code/setup.py @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import src + +setup( + name="my_test_code", + version=src.__version__, + author=src.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my test wheel", + packages=find_packages(include=["src"]), + entry_points={"group_1": "run=src.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py b/bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py new file mode 100644 index 000000000..909f1f322 --- /dev/null +++ b/bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py @@ -0,0 +1,2 @@ +__version__ = "0.0.1" +__author__ = "Databricks" diff --git 
a/bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py b/bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py new file mode 100644 index 000000000..73d045afb --- /dev/null +++ b/bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py @@ -0,0 +1,16 @@ +""" +The entry point of the Python Wheel +""" + +import sys + + +def main(): + # This method will print the provided arguments + print('Hello from my func') + print('Got arguments:') + print(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml b/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml new file mode 100644 index 000000000..07f4957bb --- /dev/null +++ b/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml @@ -0,0 +1,15 @@ +bundle: + name: python-wheel + +resources: + jobs: + test_job: + name: "[${bundle.environment}] My Wheel Job" + tasks: + - task_key: TestTask + existing_cluster_id: "0717-132531-5opeqon1" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" + libraries: + - whl: dbfs:/path/to/dist/mywheel.whl diff --git a/bundle/tests/bundle/python_wheel_no_artifact/.gitignore b/bundle/tests/bundle/python_wheel_no_artifact/.gitignore new file mode 100644 index 000000000..f03e23bc2 --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/.gitignore @@ -0,0 +1,3 @@ +build/ +*.egg-info +.databricks diff --git a/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml b/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml new file mode 100644 index 000000000..88cb47be5 --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml @@ -0,0 +1,15 @@ +bundle: + name: python-wheel + +resources: + jobs: + test_job: + name: "[${bundle.environment}] My Wheel Job" + tasks: + - task_key: TestTask + existing_cluster_id: "0717-aaaaa-bbbbbb" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" + libraries: + - whl: ./dist/*.whl diff --git a/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py b/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py new file mode 100644 index 000000000..909f1f322 --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py @@ -0,0 +1,2 @@ +__version__ = "0.0.1" +__author__ = "Databricks" diff --git a/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py b/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py new file mode 100644 index 000000000..73d045afb --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py @@ -0,0 +1,16 @@ +""" +The entry point of the Python Wheel +""" + +import sys + + +def main(): + # This method will print the provided arguments + print('Hello from my func') + print('Got arguments:') + print(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/bundle/tests/bundle/python_wheel_no_artifact/setup.py b/bundle/tests/bundle/python_wheel_no_artifact/setup.py new file mode 100644 index 000000000..7a1317b2f --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/setup.py @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import my_test_code + +setup( + name="my_test_code", + version=my_test_code.__version__, + author=my_test_code.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my test wheel", + packages=find_packages(include=["my_test_code"]), + entry_points={"group_1": "run=my_test_code.__main__:main"}, + install_requires=["setuptools"], 
+) diff --git a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore new file mode 100644 index 000000000..f03e23bc2 --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore @@ -0,0 +1,3 @@ +build/ +*.egg-info +.databricks diff --git a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml new file mode 100644 index 000000000..1bac4ebad --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml @@ -0,0 +1,22 @@ +bundle: + name: python-wheel-local + +resources: + jobs: + test_job: + name: "[${bundle.environment}] My Wheel Job" + tasks: + - task_key: TestTask + existing_cluster_id: "0717-aaaaa-bbbbbb" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" + libraries: + - whl: ./package/*.whl + - task_key: TestTask2 + existing_cluster_id: "0717-aaaaa-bbbbbb" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" + libraries: + - whl: ./non-existing/*.whl diff --git a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl new file mode 100644 index 000000000..14702281d Binary files /dev/null and b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl differ diff --git a/bundle/tests/bundle/wheel_test.go b/bundle/tests/bundle/wheel_test.go new file mode 100644 index 000000000..f7f0e75e5 --- /dev/null +++ b/bundle/tests/bundle/wheel_test.go @@ -0,0 +1,88 @@ +package bundle + +import ( + "context" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/bundle/phases" + "github.com/stretchr/testify/require" +) + +func TestBundlePythonWheelBuild(t *testing.T) { + ctx := context.Background() + b, err := bundle.Load(ctx, "./python_wheel") + require.NoError(t, err) + + m := phases.Build() + err = m.Apply(ctx, b) + require.NoError(t, err) + + matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") + require.NoError(t, err) + require.Equal(t, 1, len(matches)) + + match := libraries.MatchWithArtifacts() + err = match.Apply(ctx, b) + require.NoError(t, err) +} + +func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { + ctx := context.Background() + b, err := bundle.Load(ctx, "./python_wheel_no_artifact") + require.NoError(t, err) + + m := phases.Build() + err = m.Apply(ctx, b) + require.NoError(t, err) + + matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") + require.NoError(t, err) + require.Equal(t, 1, len(matches)) + + match := libraries.MatchWithArtifacts() + err = match.Apply(ctx, b) + require.NoError(t, err) +} + +func TestBundlePythonWheelWithDBFSLib(t *testing.T) { + ctx := context.Background() + b, err := bundle.Load(ctx, "./python_wheel_dbfs_lib") + require.NoError(t, err) + + m := phases.Build() + err = m.Apply(ctx, b) + require.NoError(t, err) + + match := libraries.MatchWithArtifacts() + err = match.Apply(ctx, b) + require.NoError(t, err) +} + +func TestBundlePythonWheelBuildNoBuildJustUpload(t *testing.T) { + ctx := context.Background() + b, err := bundle.Load(ctx, "./python_wheel_no_artifact_no_setup") + require.NoError(t, err) + + m := phases.Build() + err = m.Apply(ctx, b) + require.NoError(t, err) + + match := 
libraries.MatchWithArtifacts() + err = match.Apply(ctx, b) + require.ErrorContains(t, err, "./non-existing/*.whl") + + require.NotZero(t, len(b.Config.Artifacts)) + + artifact := b.Config.Artifacts["my_test_code-0.0.1-py3-none-any.whl"] + require.NotNil(t, artifact) + require.Empty(t, artifact.BuildCommand) + require.Contains(t, artifact.Files[0].Source, filepath.Join( + b.Config.Path, + "package", + "my_test_code-0.0.1-py3-none-any.whl", + )) + require.True(t, artifact.Files[0].NeedsUpload()) +} diff --git a/bundle/tests/conflicting_resource_ids/no_subconfigurations/bundle.yml b/bundle/tests/conflicting_resource_ids/no_subconfigurations/databricks.yml similarity index 100% rename from bundle/tests/conflicting_resource_ids/no_subconfigurations/bundle.yml rename to bundle/tests/conflicting_resource_ids/no_subconfigurations/databricks.yml diff --git a/bundle/tests/conflicting_resource_ids/one_subconfiguration/bundle.yml b/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml similarity index 81% rename from bundle/tests/conflicting_resource_ids/one_subconfiguration/bundle.yml rename to bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml index a81602920..ea4dec2e1 100644 --- a/bundle/tests/conflicting_resource_ids/one_subconfiguration/bundle.yml +++ b/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml @@ -4,6 +4,9 @@ bundle: workspace: profile: test +include: + - "*.yml" + resources: jobs: foo: diff --git a/bundle/tests/conflicting_resource_ids/two_subconfigurations/bundle.yml b/bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml similarity index 69% rename from bundle/tests/conflicting_resource_ids/two_subconfigurations/bundle.yml rename to bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml index f8fe99ebc..c1da3eaeb 100644 --- a/bundle/tests/conflicting_resource_ids/two_subconfigurations/bundle.yml +++ b/bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml @@ -3,3 +3,6 @@ bundle: workspace: profile: test + +include: + - "*.yml" diff --git a/bundle/tests/conflicting_resource_ids_test.go b/bundle/tests/conflicting_resource_ids_test.go index 12f460fde..704683ad5 100644 --- a/bundle/tests/conflicting_resource_ids_test.go +++ b/bundle/tests/conflicting_resource_ids_test.go @@ -13,24 +13,27 @@ import ( ) func TestConflictingResourceIdsNoSubconfig(t *testing.T) { - _, err := bundle.Load("./conflicting_resource_ids/no_subconfigurations") - bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/no_subconfigurations/bundle.yml") + ctx := context.Background() + _, err := bundle.Load(ctx, "./conflicting_resource_ids/no_subconfigurations") + bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/no_subconfigurations/databricks.yml") assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, bundleConfigPath)) } func TestConflictingResourceIdsOneSubconfig(t *testing.T) { - b, err := bundle.Load("./conflicting_resource_ids/one_subconfiguration") + ctx := context.Background() + b, err := bundle.Load(ctx, "./conflicting_resource_ids/one_subconfiguration") require.NoError(t, err) - err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) - bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/bundle.yml") + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + bundleConfigPath := 
filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/databricks.yml") resourcesConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/resources.yml") assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, resourcesConfigPath)) } func TestConflictingResourceIdsTwoSubconfigs(t *testing.T) { - b, err := bundle.Load("./conflicting_resource_ids/two_subconfigurations") + ctx := context.Background() + b, err := bundle.Load(ctx, "./conflicting_resource_ids/two_subconfigurations") require.NoError(t, err) - err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) resources1ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources1.yml") resources2ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources2.yml") assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", resources1ConfigPath, resources2ConfigPath)) diff --git a/bundle/tests/environment_empty/bundle.yml b/bundle/tests/environment_empty/bundle.yml deleted file mode 100644 index 17c03c8dc..000000000 --- a/bundle/tests/environment_empty/bundle.yml +++ /dev/null @@ -1,5 +0,0 @@ -bundle: - name: environment_empty - -environments: - development: diff --git a/bundle/tests/environment_empty_test.go b/bundle/tests/environment_empty_test.go deleted file mode 100644 index fb2e33416..000000000 --- a/bundle/tests/environment_empty_test.go +++ /dev/null @@ -1,12 +0,0 @@ -package config_tests - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEnvironmentEmpty(t *testing.T) { - b := loadEnvironment(t, "./environment_empty", "development") - assert.Equal(t, "development", b.Config.Bundle.Environment) -} diff --git a/bundle/tests/environment_git_test.go b/bundle/tests/environment_git_test.go new file mode 100644 index 000000000..bb10825e4 --- /dev/null +++ b/bundle/tests/environment_git_test.go @@ -0,0 +1,20 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGitAutoLoadWithEnvironment(t *testing.T) { + b := load(t, "./environments_autoload_git") + assert.True(t, b.Config.Bundle.Git.Inferred) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} + +func TestGitManuallySetBranchWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_autoload_git", "production") + assert.False(t, b.Config.Bundle.Git.Inferred) + assert.Equal(t, "main", b.Config.Bundle.Git.Branch) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} diff --git a/bundle/tests/environment_overrides/resources/databricks.yml b/bundle/tests/environment_overrides/resources/databricks.yml new file mode 100644 index 000000000..df261ba03 --- /dev/null +++ b/bundle/tests/environment_overrides/resources/databricks.yml @@ -0,0 +1,36 @@ +bundle: + name: environment_overrides + +workspace: + host: https://acme.cloud.databricks.com/ + +resources: + jobs: + job1: + name: "base job" + + pipelines: + boolean1: + photon: true + + boolean2: + photon: false + +environments: + development: + default: true + + staging: + resources: + jobs: + job1: + name: "staging job" + + pipelines: + boolean1: + # Note: setting a property to a zero value (in Go) does not have effect. + # See the corresponding test for details. 
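+          # Illustration: the override below (true -> false) is dropped because false is the zero value for bool; the boolean2 override further down (false -> true) does take effect.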
+ photon: false + + boolean2: + photon: true diff --git a/bundle/tests/environment_overrides/bundle.yml b/bundle/tests/environment_overrides/workspace/databricks.yml similarity index 100% rename from bundle/tests/environment_overrides/bundle.yml rename to bundle/tests/environment_overrides/workspace/databricks.yml diff --git a/bundle/tests/environment_overrides_test.go b/bundle/tests/environment_overrides_test.go index 4b8401c86..91dc2c811 100644 --- a/bundle/tests/environment_overrides_test.go +++ b/bundle/tests/environment_overrides_test.go @@ -6,12 +6,33 @@ import ( "github.com/stretchr/testify/assert" ) -func TestEnvironmentOverridesDev(t *testing.T) { - b := loadEnvironment(t, "./environment_overrides", "development") +func TestEnvironmentOverridesWorkspaceDev(t *testing.T) { + b := loadTarget(t, "./environment_overrides/workspace", "development") assert.Equal(t, "https://development.acme.cloud.databricks.com/", b.Config.Workspace.Host) } -func TestEnvironmentOverridesStaging(t *testing.T) { - b := loadEnvironment(t, "./environment_overrides", "staging") +func TestEnvironmentOverridesWorkspaceStaging(t *testing.T) { + b := loadTarget(t, "./environment_overrides/workspace", "staging") assert.Equal(t, "https://staging.acme.cloud.databricks.com/", b.Config.Workspace.Host) } + +func TestEnvironmentOverridesResourcesDev(t *testing.T) { + b := loadTarget(t, "./environment_overrides/resources", "development") + assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name) + + // Base values are preserved in the development environment. + assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon) + assert.Equal(t, false, b.Config.Resources.Pipelines["boolean2"].Photon) +} + +func TestEnvironmentOverridesResourcesStaging(t *testing.T) { + b := loadTarget(t, "./environment_overrides/resources", "staging") + assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name) + + // Overrides are only applied if they are not zero-valued. + // This means that in its current form, we cannot override a true value with a false value. + // Note: this is not desirable and will be addressed by representing our configuration + // in a different structure (e.g. with cty), instead of Go structs. 
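+	// Concretely: boolean1 keeps its base value (true) even though staging sets it to false, while boolean2 is flipped from false to true as expected.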
+ assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon) + assert.Equal(t, true, b.Config.Resources.Pipelines["boolean2"].Photon) +} diff --git a/bundle/tests/environments_autoload_git/databricks.yml b/bundle/tests/environments_autoload_git/databricks.yml new file mode 100644 index 000000000..ba4785aed --- /dev/null +++ b/bundle/tests/environments_autoload_git/databricks.yml @@ -0,0 +1,11 @@ +bundle: + name: autoload git config test + +environments: + development: + default: true + + production: + # production can only be deployed from the 'main' branch + git: + branch: main diff --git a/bundle/tests/job_and_pipeline/bundle.yml b/bundle/tests/environments_job_and_pipeline/databricks.yml similarity index 97% rename from bundle/tests/job_and_pipeline/bundle.yml rename to bundle/tests/environments_job_and_pipeline/databricks.yml index d6942e8a7..e29fa0349 100644 --- a/bundle/tests/job_and_pipeline/bundle.yml +++ b/bundle/tests/environments_job_and_pipeline/databricks.yml @@ -23,6 +23,7 @@ environments: development: false production: + mode: production resources: pipelines: nyc_taxi_pipeline: diff --git a/bundle/tests/environments_job_and_pipeline_test.go b/bundle/tests/environments_job_and_pipeline_test.go new file mode 100644 index 000000000..a18daf90c --- /dev/null +++ b/bundle/tests/environments_job_and_pipeline_test.go @@ -0,0 +1,56 @@ +package config_tests + +import ( + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle/config" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestJobAndPipelineDevelopmentWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_job_and_pipeline", "development") + assert.Len(t, b.Config.Resources.Jobs, 0) + assert.Len(t, b.Config.Resources.Pipelines, 1) + + p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] + assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, b.Config.Bundle.Mode, config.Development) + assert.True(t, p.Development) + require.Len(t, p.Libraries, 1) + assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) + assert.Equal(t, "nyc_taxi_development", p.Target) +} + +func TestJobAndPipelineStagingWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_job_and_pipeline", "staging") + assert.Len(t, b.Config.Resources.Jobs, 0) + assert.Len(t, b.Config.Resources.Pipelines, 1) + + p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] + assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.False(t, p.Development) + require.Len(t, p.Libraries, 1) + assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) + assert.Equal(t, "nyc_taxi_staging", p.Target) +} + +func TestJobAndPipelineProductionWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_job_and_pipeline", "production") + assert.Len(t, b.Config.Resources.Jobs, 1) + assert.Len(t, b.Config.Resources.Pipelines, 1) + + p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] + assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.False(t, p.Development) + require.Len(t, p.Libraries, 1) + assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) + assert.Equal(t, "nyc_taxi_production", p.Target) + + j := b.Config.Resources.Jobs["pipeline_schedule"] + assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(j.ConfigFilePath)) + assert.Equal(t, "Daily 
refresh of production pipeline", j.Name) + require.Len(t, j.Tasks, 1) + assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId) +} diff --git a/bundle/tests/environments_override_job_cluster/databricks.yml b/bundle/tests/environments_override_job_cluster/databricks.yml new file mode 100644 index 000000000..33061b2e3 --- /dev/null +++ b/bundle/tests/environments_override_job_cluster/databricks.yml @@ -0,0 +1,35 @@ +bundle: + name: override_job_cluster + +workspace: + host: https://acme.cloud.databricks.com/ + +resources: + jobs: + foo: + name: job + job_clusters: + - job_cluster_key: key + new_cluster: + spark_version: 13.3.x-scala2.12 + +environments: + development: + resources: + jobs: + foo: + job_clusters: + - job_cluster_key: key + new_cluster: + node_type_id: i3.xlarge + num_workers: 1 + + staging: + resources: + jobs: + foo: + job_clusters: + - job_cluster_key: key + new_cluster: + node_type_id: i3.2xlarge + num_workers: 4 diff --git a/bundle/tests/environments_override_job_cluster_test.go b/bundle/tests/environments_override_job_cluster_test.go new file mode 100644 index 000000000..b3ec74453 --- /dev/null +++ b/bundle/tests/environments_override_job_cluster_test.go @@ -0,0 +1,29 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestOverrideJobClusterDevWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_override_job_cluster", "development") + assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) + + c := b.Config.Resources.Jobs["foo"].JobClusters[0] + assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion) + assert.Equal(t, "i3.xlarge", c.NewCluster.NodeTypeId) + assert.Equal(t, 1, c.NewCluster.NumWorkers) +} + +func TestOverrideJobClusterStagingWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_override_job_cluster", "staging") + assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) + + c := b.Config.Resources.Jobs["foo"].JobClusters[0] + assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion) + assert.Equal(t, "i3.2xlarge", c.NewCluster.NodeTypeId) + assert.Equal(t, 4, c.NewCluster.NumWorkers) +} diff --git a/bundle/tests/git_branch_validation/.mock-git/HEAD b/bundle/tests/git_branch_validation/.mock-git/HEAD new file mode 100644 index 000000000..6c83ec9df --- /dev/null +++ b/bundle/tests/git_branch_validation/.mock-git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/feature-b diff --git a/bundle/tests/git_branch_validation/databricks.yml b/bundle/tests/git_branch_validation/databricks.yml new file mode 100644 index 000000000..8c7b96efc --- /dev/null +++ b/bundle/tests/git_branch_validation/databricks.yml @@ -0,0 +1,4 @@ +bundle: + name: "Dancing Feet" + git: + branch: "feature-a" diff --git a/bundle/tests/git_test.go b/bundle/tests/git_test.go new file mode 100644 index 000000000..c5ae83a20 --- /dev/null +++ b/bundle/tests/git_test.go @@ -0,0 +1,39 @@ +package config_tests + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/libs/git" + "github.com/stretchr/testify/assert" +) + +func TestGitAutoLoad(t *testing.T) { + b := load(t, "./autoload_git") + assert.True(t, b.Config.Bundle.Git.Inferred) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} + +func TestGitManuallySetBranch(t *testing.T) { + b := loadTarget(t, "./autoload_git", "production") + 
assert.False(t, b.Config.Bundle.Git.Inferred) + assert.Equal(t, "main", b.Config.Bundle.Git.Branch) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} + +func TestGitBundleBranchValidation(t *testing.T) { + git.GitDirectoryName = ".mock-git" + t.Cleanup(func() { + git.GitDirectoryName = ".git" + }) + + b := load(t, "./git_branch_validation") + assert.False(t, b.Config.Bundle.Git.Inferred) + assert.Equal(t, "feature-a", b.Config.Bundle.Git.Branch) + assert.Equal(t, "feature-b", b.Config.Bundle.Git.ActualBranch) + + err := bundle.Apply(context.Background(), b, mutator.ValidateGitDetails()) + assert.ErrorContains(t, err, "not on the right Git branch:") +} diff --git a/bundle/tests/include_default/bundle.yml b/bundle/tests/include_default/databricks.yml similarity index 100% rename from bundle/tests/include_default/bundle.yml rename to bundle/tests/include_default/databricks.yml diff --git a/bundle/tests/include_default/my_second_job/resource.yml b/bundle/tests/include_default/resource.yml similarity index 100% rename from bundle/tests/include_default/my_second_job/resource.yml rename to bundle/tests/include_default/resource.yml diff --git a/bundle/tests/include_default_test.go b/bundle/tests/include_default_test.go deleted file mode 100644 index dc7dbcd9c..000000000 --- a/bundle/tests/include_default_test.go +++ /dev/null @@ -1,27 +0,0 @@ -package config_tests - -import ( - "path/filepath" - "sort" - "testing" - - "github.com/stretchr/testify/assert" - "golang.org/x/exp/maps" -) - -func TestIncludeDefault(t *testing.T) { - b := load(t, "./include_default") - - // Test that both jobs were loaded. - keys := maps.Keys(b.Config.Resources.Jobs) - sort.Strings(keys) - assert.Equal(t, []string{"my_first_job", "my_second_job"}, keys) - - first := b.Config.Resources.Jobs["my_first_job"] - assert.Equal(t, "1", first.ID) - assert.Equal(t, "include_default/my_first_job/resource.yml", filepath.ToSlash(first.ConfigFilePath)) - - second := b.Config.Resources.Jobs["my_second_job"] - assert.Equal(t, "2", second.ID) - assert.Equal(t, "include_default/my_second_job/resource.yml", filepath.ToSlash(second.ConfigFilePath)) -} diff --git a/bundle/tests/include_invalid/bundle.yml b/bundle/tests/include_invalid/databricks.yml similarity index 100% rename from bundle/tests/include_invalid/bundle.yml rename to bundle/tests/include_invalid/databricks.yml diff --git a/bundle/tests/include_multiple/databricks.yml b/bundle/tests/include_multiple/databricks.yml new file mode 100644 index 000000000..ca3ff8545 --- /dev/null +++ b/bundle/tests/include_multiple/databricks.yml @@ -0,0 +1,5 @@ +bundle: + name: include_default + +include: + - "*/*.yml" diff --git a/bundle/tests/include_override/this_file_isnt_included.yml b/bundle/tests/include_multiple/my_first_job/resource.yml similarity index 53% rename from bundle/tests/include_override/this_file_isnt_included.yml rename to bundle/tests/include_multiple/my_first_job/resource.yml index c9ba1452f..c2be5a160 100644 --- a/bundle/tests/include_override/this_file_isnt_included.yml +++ b/bundle/tests/include_multiple/my_first_job/resource.yml @@ -1,4 +1,4 @@ resources: jobs: - this_job_isnt_defined: + my_first_job: id: 1 diff --git a/bundle/tests/include_multiple/my_second_job/resource.yml b/bundle/tests/include_multiple/my_second_job/resource.yml new file mode 100644 index 000000000..2c28c4622 --- /dev/null +++ b/bundle/tests/include_multiple/my_second_job/resource.yml @@ -0,0 +1,4 @@ +resources: + jobs: + my_second_job: + id: 2 diff --git 
a/bundle/tests/include_override/bundle.yml b/bundle/tests/include_override/bundle.yml deleted file mode 100644 index 02de362cd..000000000 --- a/bundle/tests/include_override/bundle.yml +++ /dev/null @@ -1,7 +0,0 @@ -bundle: - name: include_override - -# Setting this explicitly means default globs are not processed. -# As a result, ./this_file_isnt_included.yml isn't included. -include: - - doesnt-exist/*.yml diff --git a/bundle/tests/include_override_test.go b/bundle/tests/include_override_test.go deleted file mode 100644 index 0e18fab34..000000000 --- a/bundle/tests/include_override_test.go +++ /dev/null @@ -1,12 +0,0 @@ -package config_tests - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestIncludeOverride(t *testing.T) { - b := load(t, "./include_override") - assert.Empty(t, b.Config.Resources.Jobs) -} diff --git a/bundle/tests/include_test.go b/bundle/tests/include_test.go index d704b8380..eb09d1aa0 100644 --- a/bundle/tests/include_test.go +++ b/bundle/tests/include_test.go @@ -14,9 +14,10 @@ import ( ) func TestIncludeInvalid(t *testing.T) { - b, err := bundle.Load("./include_invalid") + ctx := context.Background() + b, err := bundle.Load(ctx, "./include_invalid") require.NoError(t, err) - err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) require.Error(t, err) assert.Contains(t, err.Error(), "notexists.yml defined in 'include' section does not match any files") } @@ -32,3 +33,27 @@ func TestIncludeWithGlob(t *testing.T) { assert.Equal(t, "1", job.ID) assert.Equal(t, "include_with_glob/job.yml", filepath.ToSlash(job.ConfigFilePath)) } + +func TestIncludeDefault(t *testing.T) { + b := load(t, "./include_default") + + // No jobs should have been loaded + assert.Empty(t, b.Config.Resources.Jobs) +} + +func TestIncludeForMultipleMatches(t *testing.T) { + b := load(t, "./include_multiple") + + // Test that both jobs were loaded. 
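+	// Sort the keys first: Go randomizes map iteration order, so the equality check below needs a deterministic ordering.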
+ keys := maps.Keys(b.Config.Resources.Jobs) + sort.Strings(keys) + assert.Equal(t, []string{"my_first_job", "my_second_job"}, keys) + + first := b.Config.Resources.Jobs["my_first_job"] + assert.Equal(t, "1", first.ID) + assert.Equal(t, "include_multiple/my_first_job/resource.yml", filepath.ToSlash(first.ConfigFilePath)) + + second := b.Config.Resources.Jobs["my_second_job"] + assert.Equal(t, "2", second.ID) + assert.Equal(t, "include_multiple/my_second_job/resource.yml", filepath.ToSlash(second.ConfigFilePath)) +} diff --git a/bundle/tests/include_with_glob/bundle.yml b/bundle/tests/include_with_glob/databricks.yml similarity index 100% rename from bundle/tests/include_with_glob/bundle.yml rename to bundle/tests/include_with_glob/databricks.yml diff --git a/bundle/tests/interpolation/bundle.yml b/bundle/tests/interpolation/databricks.yml similarity index 100% rename from bundle/tests/interpolation/bundle.yml rename to bundle/tests/interpolation/databricks.yml diff --git a/bundle/tests/interpolation_target/databricks.yml b/bundle/tests/interpolation_target/databricks.yml new file mode 100644 index 000000000..ad4ebe199 --- /dev/null +++ b/bundle/tests/interpolation_target/databricks.yml @@ -0,0 +1,14 @@ +bundle: + name: foo ${workspace.profile} + +workspace: + profile: bar + +targets: + development: + default: true + +resources: + jobs: + my_job: + name: "${bundle.name} | ${workspace.profile} | ${bundle.environment} | ${bundle.target}" diff --git a/bundle/tests/interpolation_test.go b/bundle/tests/interpolation_test.go index 47b0c775f..837891a07 100644 --- a/bundle/tests/interpolation_test.go +++ b/bundle/tests/interpolation_test.go @@ -20,3 +20,15 @@ func TestInterpolation(t *testing.T) { assert.Equal(t, "foo bar", b.Config.Bundle.Name) assert.Equal(t, "foo bar | bar", b.Config.Resources.Jobs["my_job"].Name) } + +func TestInterpolationWithTarget(t *testing.T) { + b := loadTarget(t, "./interpolation_target", "development") + err := bundle.Apply(context.Background(), b, interpolation.Interpolate( + interpolation.IncludeLookupsInPath("bundle"), + interpolation.IncludeLookupsInPath("workspace"), + )) + require.NoError(t, err) + assert.Equal(t, "foo bar", b.Config.Bundle.Name) + assert.Equal(t, "foo bar | bar | development | development", b.Config.Resources.Jobs["my_job"].Name) + +} diff --git a/bundle/tests/job_and_pipeline/databricks.yml b/bundle/tests/job_and_pipeline/databricks.yml new file mode 100644 index 000000000..67d306ffe --- /dev/null +++ b/bundle/tests/job_and_pipeline/databricks.yml @@ -0,0 +1,44 @@ +resources: + pipelines: + nyc_taxi_pipeline: + name: "nyc taxi loader" + libraries: + - notebook: + path: ./dlt/nyc_taxi_loader + +targets: + development: + mode: development + resources: + pipelines: + nyc_taxi_pipeline: + target: nyc_taxi_development + development: true + + staging: + resources: + pipelines: + nyc_taxi_pipeline: + target: nyc_taxi_staging + development: false + + production: + mode: production + resources: + pipelines: + nyc_taxi_pipeline: + target: nyc_taxi_production + development: false + photon: true + + jobs: + pipeline_schedule: + name: Daily refresh of production pipeline + + schedule: + quartz_cron_expression: 6 6 11 * * ? 
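+              # Quartz cron fields are seconds, minutes, hours, day-of-month, month, day-of-week: this fires daily at 11:06:06 ('?' means no specific day-of-week).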
+ timezone_id: UTC + + tasks: + - pipeline_task: + pipeline_id: "to be interpolated" diff --git a/bundle/tests/job_and_pipeline_test.go b/bundle/tests/job_and_pipeline_test.go index 775f415c2..5e8febc33 100644 --- a/bundle/tests/job_and_pipeline_test.go +++ b/bundle/tests/job_and_pipeline_test.go @@ -10,12 +10,12 @@ import ( ) func TestJobAndPipelineDevelopment(t *testing.T) { - b := loadEnvironment(t, "./job_and_pipeline", "development") + b := loadTarget(t, "./job_and_pipeline", "development") assert.Len(t, b.Config.Resources.Jobs, 0) assert.Len(t, b.Config.Resources.Pipelines, 1) p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] - assert.Equal(t, "job_and_pipeline/bundle.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) assert.Equal(t, b.Config.Bundle.Mode, config.Development) assert.True(t, p.Development) require.Len(t, p.Libraries, 1) @@ -24,12 +24,12 @@ func TestJobAndPipelineDevelopment(t *testing.T) { } func TestJobAndPipelineStaging(t *testing.T) { - b := loadEnvironment(t, "./job_and_pipeline", "staging") + b := loadTarget(t, "./job_and_pipeline", "staging") assert.Len(t, b.Config.Resources.Jobs, 0) assert.Len(t, b.Config.Resources.Pipelines, 1) p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] - assert.Equal(t, "job_and_pipeline/bundle.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) assert.False(t, p.Development) require.Len(t, p.Libraries, 1) assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) @@ -37,19 +37,19 @@ func TestJobAndPipelineStaging(t *testing.T) { } func TestJobAndPipelineProduction(t *testing.T) { - b := loadEnvironment(t, "./job_and_pipeline", "production") + b := loadTarget(t, "./job_and_pipeline", "production") assert.Len(t, b.Config.Resources.Jobs, 1) assert.Len(t, b.Config.Resources.Pipelines, 1) p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] - assert.Equal(t, "job_and_pipeline/bundle.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) assert.False(t, p.Development) require.Len(t, p.Libraries, 1) assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) assert.Equal(t, "nyc_taxi_production", p.Target) j := b.Config.Resources.Jobs["pipeline_schedule"] - assert.Equal(t, "job_and_pipeline/bundle.yml", filepath.ToSlash(j.ConfigFilePath)) + assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(j.ConfigFilePath)) assert.Equal(t, "Daily refresh of production pipeline", j.Name) require.Len(t, j.Tasks, 1) assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId) diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index 42f1fc5be..f23b10764 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -10,16 +10,17 @@ import ( ) func load(t *testing.T, path string) *bundle.Bundle { - b, err := bundle.Load(path) + ctx := context.Background() + b, err := bundle.Load(ctx, path) require.NoError(t, err) - err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) require.NoError(t, err) return b } -func loadEnvironment(t *testing.T, path, env string) *bundle.Bundle { +func loadTarget(t *testing.T, path, env string) *bundle.Bundle { b := load(t, path) - err := bundle.Apply(context.Background(), b, mutator.SelectEnvironment(env)) + err := 
bundle.Apply(context.Background(), b, mutator.SelectTarget(env)) require.NoError(t, err) return b } diff --git a/bundle/tests/model_serving_endpoint/databricks.yml b/bundle/tests/model_serving_endpoint/databricks.yml new file mode 100644 index 000000000..e4fb54a1f --- /dev/null +++ b/bundle/tests/model_serving_endpoint/databricks.yml @@ -0,0 +1,38 @@ +resources: + model_serving_endpoints: + my_model_serving_endpoint: + name: "my-endpoint" + config: + served_models: + - model_name: "model-name" + model_version: "1" + workload_size: "Small" + scale_to_zero_enabled: true + traffic_config: + routes: + - served_model_name: "model-name-1" + traffic_percentage: 100 + permissions: + - level: CAN_QUERY + group_name: users + +targets: + development: + mode: development + resources: + model_serving_endpoints: + my_model_serving_endpoint: + name: "my-dev-endpoint" + + staging: + resources: + model_serving_endpoints: + my_model_serving_endpoint: + name: "my-staging-endpoint" + + production: + mode: production + resources: + model_serving_endpoints: + my_model_serving_endpoint: + name: "my-prod-endpoint" diff --git a/bundle/tests/model_serving_endpoint_test.go b/bundle/tests/model_serving_endpoint_test.go new file mode 100644 index 000000000..bfa1a31b4 --- /dev/null +++ b/bundle/tests/model_serving_endpoint_test.go @@ -0,0 +1,48 @@ +package config_tests + +import ( + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" + "github.com/stretchr/testify/assert" +) + +func assertExpected(t *testing.T, p *resources.ModelServingEndpoint) { + assert.Equal(t, "model_serving_endpoint/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, "model-name", p.Config.ServedModels[0].ModelName) + assert.Equal(t, "1", p.Config.ServedModels[0].ModelVersion) + assert.Equal(t, "model-name-1", p.Config.TrafficConfig.Routes[0].ServedModelName) + assert.Equal(t, 100, p.Config.TrafficConfig.Routes[0].TrafficPercentage) + assert.Equal(t, "users", p.Permissions[0].GroupName) + assert.Equal(t, "CAN_QUERY", p.Permissions[0].Level) +} + +func TestModelServingEndpointDevelopment(t *testing.T) { + b := loadTarget(t, "./model_serving_endpoint", "development") + assert.Len(t, b.Config.Resources.ModelServingEndpoints, 1) + assert.Equal(t, b.Config.Bundle.Mode, config.Development) + + p := b.Config.Resources.ModelServingEndpoints["my_model_serving_endpoint"] + assert.Equal(t, "my-dev-endpoint", p.Name) + assertExpected(t, p) +} + +func TestModelServingEndpointStaging(t *testing.T) { + b := loadTarget(t, "./model_serving_endpoint", "staging") + assert.Len(t, b.Config.Resources.ModelServingEndpoints, 1) + + p := b.Config.Resources.ModelServingEndpoints["my_model_serving_endpoint"] + assert.Equal(t, "my-staging-endpoint", p.Name) + assertExpected(t, p) +} + +func TestModelServingEndpointProduction(t *testing.T) { + b := loadTarget(t, "./model_serving_endpoint", "production") + assert.Len(t, b.Config.Resources.ModelServingEndpoints, 1) + + p := b.Config.Resources.ModelServingEndpoints["my_model_serving_endpoint"] + assert.Equal(t, "my-prod-endpoint", p.Name) + assertExpected(t, p) +} diff --git a/bundle/tests/override_job_cluster/databricks.yml b/bundle/tests/override_job_cluster/databricks.yml new file mode 100644 index 000000000..a85b3b711 --- /dev/null +++ b/bundle/tests/override_job_cluster/databricks.yml @@ -0,0 +1,35 @@ +bundle: + name: override_job_cluster + +workspace: + host: https://acme.cloud.databricks.com/ + +resources: + jobs: + foo: + 
name: job + job_clusters: + - job_cluster_key: key + new_cluster: + spark_version: 13.3.x-scala2.12 + +targets: + development: + resources: + jobs: + foo: + job_clusters: + - job_cluster_key: key + new_cluster: + node_type_id: i3.xlarge + num_workers: 1 + + staging: + resources: + jobs: + foo: + job_clusters: + - job_cluster_key: key + new_cluster: + node_type_id: i3.2xlarge + num_workers: 4 diff --git a/bundle/tests/override_job_cluster_test.go b/bundle/tests/override_job_cluster_test.go new file mode 100644 index 000000000..1393e03e5 --- /dev/null +++ b/bundle/tests/override_job_cluster_test.go @@ -0,0 +1,29 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestOverrideJobClusterDev(t *testing.T) { + b := loadTarget(t, "./override_job_cluster", "development") + assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) + + c := b.Config.Resources.Jobs["foo"].JobClusters[0] + assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion) + assert.Equal(t, "i3.xlarge", c.NewCluster.NodeTypeId) + assert.Equal(t, 1, c.NewCluster.NumWorkers) +} + +func TestOverrideJobClusterStaging(t *testing.T) { + b := loadTarget(t, "./override_job_cluster", "staging") + assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) + + c := b.Config.Resources.Jobs["foo"].JobClusters[0] + assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion) + assert.Equal(t, "i3.2xlarge", c.NewCluster.NodeTypeId) + assert.Equal(t, 4, c.NewCluster.NumWorkers) +} diff --git a/bundle/tests/relative_path_with_includes/bundle.yml b/bundle/tests/relative_path_with_includes/bundle.yml new file mode 100644 index 000000000..36474c754 --- /dev/null +++ b/bundle/tests/relative_path_with_includes/bundle.yml @@ -0,0 +1,25 @@ +bundle: + name: sync_include + +include: + - "*/*.yml" + +sync: + include: + - ./folder_a/*.* + exclude: + - ./folder_b/*.* + +artifacts: + test_a: + type: whl + path: ./artifact_a + +resources: + jobs: + job_a: + name: "job_a" + tasks: + - task_key: "task_a" + libraries: + - whl: ./dist/job_a.whl diff --git a/bundle/tests/relative_path_with_includes/subfolder/include.yml b/bundle/tests/relative_path_with_includes/subfolder/include.yml new file mode 100644 index 000000000..597abe3bf --- /dev/null +++ b/bundle/tests/relative_path_with_includes/subfolder/include.yml @@ -0,0 +1,20 @@ +sync: + include: + - ./folder_c/*.* + exclude: + - ./folder_d/*.* + +artifacts: + test_b: + type: whl + path: ./artifact_b + + +resources: + jobs: + job_b: + name: "job_b" + tasks: + - task_key: "task_a" + libraries: + - whl: ./dist/job_b.whl diff --git a/bundle/tests/relative_path_with_includes_test.go b/bundle/tests/relative_path_with_includes_test.go new file mode 100644 index 000000000..92249c412 --- /dev/null +++ b/bundle/tests/relative_path_with_includes_test.go @@ -0,0 +1,28 @@ +package config_tests + +import ( + "context" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/stretchr/testify/assert" +) + +func TestRelativePathsWithIncludes(t *testing.T) { + b := load(t, "./relative_path_with_includes") + + m := mutator.TranslatePaths() + err := bundle.Apply(context.Background(), b, m) + assert.NoError(t, err) + + assert.Equal(t, "artifact_a", b.Config.Artifacts["test_a"].Path) + assert.Equal(t, filepath.Join("subfolder", "artifact_b"), 
b.Config.Artifacts["test_b"].Path) + + assert.ElementsMatch(t, []string{"./folder_a/*.*", filepath.Join("subfolder", "folder_c", "*.*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{"./folder_b/*.*", filepath.Join("subfolder", "folder_d", "*.*")}, b.Config.Sync.Exclude) + + assert.Equal(t, filepath.Join("dist", "job_a.whl"), b.Config.Resources.Jobs["job_a"].Tasks[0].Libraries[0].Whl) + assert.Equal(t, filepath.Join("subfolder", "dist", "job_b.whl"), b.Config.Resources.Jobs["job_b"].Tasks[0].Libraries[0].Whl) +} diff --git a/bundle/tests/run_as/databricks.yml b/bundle/tests/run_as/databricks.yml new file mode 100644 index 000000000..18ea55736 --- /dev/null +++ b/bundle/tests/run_as/databricks.yml @@ -0,0 +1,42 @@ +bundle: + name: "run_as" + +run_as: + service_principal_name: "my_service_principal" + +targets: + development: + mode: development + run_as: + user_name: "my_user_name" + +resources: + pipelines: + nyc_taxi_pipeline: + permissions: + - level: CAN_VIEW + service_principal_name: my_service_principal + - level: CAN_VIEW + user_name: my_user_name + name: "nyc taxi loader" + libraries: + - notebook: + path: ./dlt/nyc_taxi_loader + jobs: + job_one: + name: Job One + tasks: + - task: + notebook_path: "./test.py" + job_two: + name: Job Two + tasks: + - task: + notebook_path: "./test.py" + job_three: + name: Job Three + run_as: + service_principal_name: "my_service_principal_for_job" + tasks: + - task: + notebook_path: "./test.py" diff --git a/bundle/tests/run_as_test.go b/bundle/tests/run_as_test.go new file mode 100644 index 000000000..44c068165 --- /dev/null +++ b/bundle/tests/run_as_test.go @@ -0,0 +1,82 @@ +package config_tests + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/stretchr/testify/assert" +) + +func TestRunAsDefault(t *testing.T) { + b := load(t, "./run_as") + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, + } + ctx := context.Background() + err := bundle.Apply(ctx, b, mutator.SetRunAs()) + assert.NoError(t, err) + + assert.Len(t, b.Config.Resources.Jobs, 3) + jobs := b.Config.Resources.Jobs + + assert.NotNil(t, jobs["job_one"].RunAs) + assert.Equal(t, "my_service_principal", jobs["job_one"].RunAs.ServicePrincipalName) + assert.Equal(t, "", jobs["job_one"].RunAs.UserName) + + assert.NotNil(t, jobs["job_two"].RunAs) + assert.Equal(t, "my_service_principal", jobs["job_two"].RunAs.ServicePrincipalName) + assert.Equal(t, "", jobs["job_two"].RunAs.UserName) + + assert.NotNil(t, jobs["job_three"].RunAs) + assert.Equal(t, "my_service_principal_for_job", jobs["job_three"].RunAs.ServicePrincipalName) + assert.Equal(t, "", jobs["job_three"].RunAs.UserName) + + pipelines := b.Config.Resources.Pipelines + assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2) + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].Level, "CAN_VIEW") + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].UserName, "my_user_name") + + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].Level, "IS_OWNER") + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].ServicePrincipalName, "my_service_principal") +} + +func TestRunAsDevelopment(t *testing.T) { + b := loadTarget(t, "./run_as", "development") + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, + } + ctx 
:= context.Background() + err := bundle.Apply(ctx, b, mutator.SetRunAs()) + assert.NoError(t, err) + + assert.Len(t, b.Config.Resources.Jobs, 3) + jobs := b.Config.Resources.Jobs + + assert.NotNil(t, jobs["job_one"].RunAs) + assert.Equal(t, "", jobs["job_one"].RunAs.ServicePrincipalName) + assert.Equal(t, "my_user_name", jobs["job_one"].RunAs.UserName) + + assert.NotNil(t, jobs["job_two"].RunAs) + assert.Equal(t, "", jobs["job_two"].RunAs.ServicePrincipalName) + assert.Equal(t, "my_user_name", jobs["job_two"].RunAs.UserName) + + assert.NotNil(t, jobs["job_three"].RunAs) + assert.Equal(t, "my_service_principal_for_job", jobs["job_three"].RunAs.ServicePrincipalName) + assert.Equal(t, "", jobs["job_three"].RunAs.UserName) + + pipelines := b.Config.Resources.Pipelines + assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2) + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].Level, "CAN_VIEW") + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].ServicePrincipalName, "my_service_principal") + + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].Level, "IS_OWNER") + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].UserName, "my_user_name") +} diff --git a/bundle/tests/suggest_target_test.go b/bundle/tests/suggest_target_test.go new file mode 100644 index 000000000..924d6a4e1 --- /dev/null +++ b/bundle/tests/suggest_target_test.go @@ -0,0 +1,17 @@ +package config_tests + +import ( + "path/filepath" + "testing" + + "github.com/databricks/cli/internal" + "github.com/stretchr/testify/require" +) + +func TestSuggestTargetIfWrongPassed(t *testing.T) { + t.Setenv("BUNDLE_ROOT", filepath.Join("target_overrides", "workspace")) + _, _, err := internal.RequireErrorRun(t, "bundle", "validate", "-e", "incorrect") + require.ErrorContains(t, err, "Available targets:") + require.ErrorContains(t, err, "development") + require.ErrorContains(t, err, "staging") +} diff --git a/bundle/tests/target_empty/databricks.yml b/bundle/tests/target_empty/databricks.yml new file mode 100644 index 000000000..cd415377c --- /dev/null +++ b/bundle/tests/target_empty/databricks.yml @@ -0,0 +1,5 @@ +bundle: + name: target_empty + +targets: + development: diff --git a/bundle/tests/target_empty_test.go b/bundle/tests/target_empty_test.go new file mode 100644 index 000000000..88705d8bb --- /dev/null +++ b/bundle/tests/target_empty_test.go @@ -0,0 +1,12 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTargetEmpty(t *testing.T) { + b := loadTarget(t, "./target_empty", "development") + assert.Equal(t, "development", b.Config.Bundle.Target) +} diff --git a/bundle/tests/target_overrides/resources/databricks.yml b/bundle/tests/target_overrides/resources/databricks.yml new file mode 100644 index 000000000..f6e2a7edb --- /dev/null +++ b/bundle/tests/target_overrides/resources/databricks.yml @@ -0,0 +1,20 @@ +bundle: + name: environment_overrides + +workspace: + host: https://acme.cloud.databricks.com/ + +resources: + jobs: + job1: + name: "base job" + +targets: + development: + default: true + + staging: + resources: + jobs: + job1: + name: "staging job" diff --git a/bundle/tests/target_overrides/workspace/databricks.yml b/bundle/tests/target_overrides/workspace/databricks.yml new file mode 100644 index 000000000..8c4f9487e --- /dev/null +++ b/bundle/tests/target_overrides/workspace/databricks.yml @@ -0,0 +1,14 @@ +bundle: + name: environment_overrides + +workspace: + host: https://acme.cloud.databricks.com/ + +targets: + development: 
+ workspace: + host: https://development.acme.cloud.databricks.com/ + + staging: + workspace: + host: https://staging.acme.cloud.databricks.com/ diff --git a/bundle/tests/target_overrides_test.go b/bundle/tests/target_overrides_test.go new file mode 100644 index 000000000..2516ce2a3 --- /dev/null +++ b/bundle/tests/target_overrides_test.go @@ -0,0 +1,27 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTargetOverridesWorkspaceDev(t *testing.T) { + b := loadTarget(t, "./target_overrides/workspace", "development") + assert.Equal(t, "https://development.acme.cloud.databricks.com/", b.Config.Workspace.Host) +} + +func TestTargetOverridesWorkspaceStaging(t *testing.T) { + b := loadTarget(t, "./target_overrides/workspace", "staging") + assert.Equal(t, "https://staging.acme.cloud.databricks.com/", b.Config.Workspace.Host) +} + +func TestTargetOverridesResourcesDev(t *testing.T) { + b := loadTarget(t, "./target_overrides/resources", "development") + assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name) +} + +func TestTargetOverridesResourcesStaging(t *testing.T) { + b := loadTarget(t, "./target_overrides/resources", "staging") + assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name) +} diff --git a/bundle/tests/variables/env_overrides/bundle.yml b/bundle/tests/variables/env_overrides/databricks.yml similarity index 97% rename from bundle/tests/variables/env_overrides/bundle.yml rename to bundle/tests/variables/env_overrides/databricks.yml index 1fec10733..2157596c3 100644 --- a/bundle/tests/variables/env_overrides/bundle.yml +++ b/bundle/tests/variables/env_overrides/databricks.yml @@ -12,7 +12,7 @@ bundle: workspace: profile: ${var.a} ${var.b} -environments: +targets: env-with-single-variable-override: variables: b: dev-b diff --git a/bundle/tests/variables/vanilla/bundle.yml b/bundle/tests/variables/vanilla/databricks.yml similarity index 100% rename from bundle/tests/variables/vanilla/bundle.yml rename to bundle/tests/variables/vanilla/databricks.yml diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index 365ffbd4b..93c822505 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -34,10 +34,10 @@ func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) { assert.ErrorContains(t, err, "no value assigned to required variable b. 
Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") } -func TestVariablesEnvironmentsBlockOverride(t *testing.T) { +func TestVariablesTargetsBlockOverride(t *testing.T) { b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-with-single-variable-override"), + mutator.SelectTarget("env-with-single-variable-override"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), @@ -46,10 +46,10 @@ func TestVariablesEnvironmentsBlockOverride(t *testing.T) { assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile) } -func TestVariablesEnvironmentsBlockOverrideForMultipleVariables(t *testing.T) { +func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) { b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-with-two-variable-overrides"), + mutator.SelectTarget("env-with-two-variable-overrides"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), @@ -58,11 +58,11 @@ func TestVariablesEnvironmentsBlockOverrideForMultipleVariables(t *testing.T) { assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile) } -func TestVariablesEnvironmentsBlockOverrideWithProcessEnvVars(t *testing.T) { +func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) { t.Setenv("BUNDLE_VAR_b", "env-var-b") b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-with-two-variable-overrides"), + mutator.SelectTarget("env-with-two-variable-overrides"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), @@ -71,10 +71,10 @@ func TestVariablesEnvironmentsBlockOverrideWithProcessEnvVars(t *testing.T) { assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile) } -func TestVariablesEnvironmentsBlockOverrideWithMissingVariables(t *testing.T) { +func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) { b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-missing-a-required-variable-assignment"), + mutator.SelectTarget("env-missing-a-required-variable-assignment"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), @@ -82,10 +82,10 @@ func TestVariablesEnvironmentsBlockOverrideWithMissingVariables(t *testing.T) { assert.ErrorContains(t, err, "no value assigned to required variable b. 
Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") } -func TestVariablesEnvironmentsBlockOverrideWithUndefinedVariables(t *testing.T) { +func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) { b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-using-an-undefined-variable"), + mutator.SelectTarget("env-using-an-undefined-variable"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), diff --git a/bundle/tests/yaml_anchors/bundle.yml b/bundle/tests/yaml_anchors/databricks.yml similarity index 100% rename from bundle/tests/yaml_anchors/bundle.yml rename to bundle/tests/yaml_anchors/databricks.yml diff --git a/cmd/account/access-control/access-control.go b/cmd/account/access-control/access-control.go index 5cec69a31..01c076fbd 100755 --- a/cmd/account/access-control/access-control.go +++ b/cmd/account/access-control/access-control.go @@ -12,42 +12,64 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "access-control", - Short: `These APIs manage access rules on resources in an account.`, - Long: `These APIs manage access rules on resources in an account. Currently, only +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "access-control", + Short: `These APIs manage access rules on resources in an account.`, + Long: `These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is called a rule set.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get-assignable-roles-for-resource command -var getAssignableRolesForResourceReq iam.GetAssignableRolesForResourceRequest -func init() { - Cmd.AddCommand(getAssignableRolesForResourceCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getAssignableRolesForResourceOverrides []func( + *cobra.Command, + *iam.GetAssignableRolesForResourceRequest, +) + +func newGetAssignableRolesForResource() *cobra.Command { + cmd := &cobra.Command{} + + var getAssignableRolesForResourceReq iam.GetAssignableRolesForResourceRequest + // TODO: short flags -} - -var getAssignableRolesForResourceCmd = &cobra.Command{ - Use: "get-assignable-roles-for-resource RESOURCE", - Short: `Get assignable roles for a resource.`, - Long: `Get assignable roles for a resource. + cmd.Use = "get-assignable-roles-for-resource RESOURCE" + cmd.Short = `Get assignable roles for a resource.` + cmd.Long = `Get assignable roles for a resource. Gets all the roles that can be granted on an account level resource. 
A role is grantable if the rule set on the resource can contain an access rule of the - role.`, + role.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -58,37 +80,59 @@ var getAssignableRolesForResourceCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getAssignableRolesForResourceOverrides { + fn(cmd, &getAssignableRolesForResourceReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetAssignableRolesForResource()) + }) } // start get-rule-set command -var getRuleSetReq iam.GetRuleSetRequest -func init() { - Cmd.AddCommand(getRuleSetCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getRuleSetOverrides []func( + *cobra.Command, + *iam.GetRuleSetRequest, +) + +func newGetRuleSet() *cobra.Command { + cmd := &cobra.Command{} + + var getRuleSetReq iam.GetRuleSetRequest + // TODO: short flags -} - -var getRuleSetCmd = &cobra.Command{ - Use: "get-rule-set NAME ETAG", - Short: `Get a rule set.`, - Long: `Get a rule set. + cmd.Use = "get-rule-set NAME ETAG" + cmd.Short = `Get a rule set.` + cmd.Long = `Get a rule set. Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default - rule set for each resource is supported.`, + rule set for each resource is supported.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -100,35 +144,56 @@ var getRuleSetCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getRuleSetOverrides { + fn(cmd, &getRuleSetReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetRuleSet()) + }) } // start update-rule-set command -var updateRuleSetReq iam.UpdateRuleSetRequest -var updateRuleSetJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateRuleSetCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateRuleSetOverrides []func( + *cobra.Command, + *iam.UpdateRuleSetRequest, +) + +func newUpdateRuleSet() *cobra.Command { + cmd := &cobra.Command{} + + var updateRuleSetReq iam.UpdateRuleSetRequest + var updateRuleSetJson flags.JsonFlag + // TODO: short flags - updateRuleSetCmd.Flags().Var(&updateRuleSetJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateRuleSetJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateRuleSetCmd = &cobra.Command{ - Use: "update-rule-set", - Short: `Update a rule set.`, - Long: `Update a rule set. + cmd.Use = "update-rule-set" + cmd.Short = `Update a rule set.` + cmd.Long = `Update a rule set. Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. This pattern helps prevent conflicts between - concurrent updates.`, + concurrent updates.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -146,10 +211,24 @@ var updateRuleSetCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateRuleSetOverrides { + fn(cmd, &updateRuleSetReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateRuleSet()) + }) } // end service AccountAccessControl diff --git a/cmd/account/billable-usage/billable-usage.go b/cmd/account/billable-usage/billable-usage.go index babc7bc2c..b5b9749dc 100755 --- a/cmd/account/billable-usage/billable-usage.go +++ b/cmd/account/billable-usage/billable-usage.go @@ -8,31 +8,51 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "billable-usage", - Short: `This API allows you to download billable usage logs for the specified account and date range.`, - Long: `This API allows you to download billable usage logs for the specified account +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "billable-usage", + Short: `This API allows you to download billable usage logs for the specified account and date range.`, + Long: `This API allows you to download billable usage logs for the specified account and date range. 
This feature works with all account types.`, - Annotations: map[string]string{ - "package": "billing", - }, + GroupID: "billing", + Annotations: map[string]string{ + "package": "billing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start download command -var downloadReq billing.DownloadRequest -func init() { - Cmd.AddCommand(downloadCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var downloadOverrides []func( + *cobra.Command, + *billing.DownloadRequest, +) + +func newDownload() *cobra.Command { + cmd := &cobra.Command{} + + var downloadReq billing.DownloadRequest + // TODO: short flags - downloadCmd.Flags().BoolVar(&downloadReq.PersonalData, "personal-data", downloadReq.PersonalData, `Specify whether to include personally identifiable information in the billable usage logs, for example the email addresses of cluster creators.`) + cmd.Flags().BoolVar(&downloadReq.PersonalData, "personal-data", downloadReq.PersonalData, `Specify whether to include personally identifiable information in the billable usage logs, for example the email addresses of cluster creators.`) -} - -var downloadCmd = &cobra.Command{ - Use: "download START_MONTH END_MONTH", - Short: `Return billable usage logs.`, - Long: `Return billable usage logs. + cmd.Use = "download START_MONTH END_MONTH" + cmd.Short = `Return billable usage logs.` + cmd.Long = `Return billable usage logs. Returns billable usage logs in CSV format for the specified account and date range. For the data schema, see [CSV file schema]. Note that this method might @@ -43,15 +63,17 @@ var downloadCmd = &cobra.Command{ this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges. - [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema`, + [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -63,10 +85,24 @@ var downloadCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range downloadOverrides { + fn(cmd, &downloadReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDownload()) + }) } // end service BillableUsage diff --git a/cmd/account/budgets/budgets.go b/cmd/account/budgets/budgets.go index 3e26b181a..1a0c7a0a9 100755 --- a/cmd/account/budgets/budgets.go +++ b/cmd/account/budgets/budgets.go @@ -12,40 +12,61 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "budgets", - Short: `These APIs manage budget configuration including notifications for exceeding a budget for a period.`, - Long: `These APIs manage budget configuration including notifications for exceeding a - budget for a period. They can also retrieve the status of each budget.`, - Annotations: map[string]string{ - "package": "billing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) - // This service is being previewed; hide from help output. - Hidden: true, +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "budgets", + Short: `These APIs manage budget configuration including notifications for exceeding a budget for a period.`, + Long: `These APIs manage budget configuration including notifications for exceeding a + budget for a period. They can also retrieve the status of each budget.`, + GroupID: "billing", + Annotations: map[string]string{ + "package": "billing", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq billing.WrappedBudget -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *billing.WrappedBudget, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq billing.WrappedBudget + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new budget.`, - Long: `Create a new budget. + cmd.Use = "create" + cmd.Short = `Create a new budget.` + cmd.Long = `Create a new budget. - Creates a new budget in the specified account.`, + Creates a new budget in the specified account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -63,31 +84,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq billing.DeleteBudgetRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *billing.DeleteBudgetRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq billing.DeleteBudgetRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete BUDGET_ID", - Short: `Delete budget.`, - Long: `Delete budget. + cmd.Use = "delete BUDGET_ID" + cmd.Short = `Delete budget.` + cmd.Long = `Delete budget. - Deletes the budget specified by its UUID.`, + Deletes the budget specified by its UUID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -115,32 +157,53 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq billing.GetBudgetRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *billing.GetBudgetRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq billing.GetBudgetRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get BUDGET_ID", - Short: `Get budget and its status.`, - Long: `Get budget and its status. + cmd.Use = "get BUDGET_ID" + cmd.Short = `Get budget and its status.` + cmd.Long = `Get budget and its status. Gets the budget specified by its UUID, including noncumulative status for each - day that the budget is configured to include.`, + day that the budget is configured to include.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -168,30 +231,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all budgets.`, - Long: `Get all budgets. + cmd.Use = "list" + cmd.Short = `Get all budgets.` + cmd.Long = `Get all budgets. Gets all budgets associated with this account, including noncumulative status - for each day that the budget is configured to include.`, + for each day that the budget is configured to include.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Budgets.ListAll(ctx) @@ -199,34 +280,55 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq billing.WrappedBudget -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *billing.WrappedBudget, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq billing.WrappedBudget + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Modify budget.`, - Long: `Modify budget. + cmd.Use = "update" + cmd.Short = `Modify budget.` + cmd.Long = `Modify budget. Modifies a budget in this account. Budget properties are completely - overwritten.`, + overwritten.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -244,10 +346,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Budgets diff --git a/cmd/account/cmd.go b/cmd/account/cmd.go index 923948b6b..2b06171db 100644 --- a/cmd/account/cmd.go +++ b/cmd/account/cmd.go @@ -3,7 +3,6 @@ package account import ( - "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" account_access_control "github.com/databricks/cli/cmd/account/access-control" @@ -17,6 +16,7 @@ import ( log_delivery "github.com/databricks/cli/cmd/account/log-delivery" account_metastore_assignments "github.com/databricks/cli/cmd/account/metastore-assignments" account_metastores "github.com/databricks/cli/cmd/account/metastores" + account_network_policy "github.com/databricks/cli/cmd/account/network-policy" networks "github.com/databricks/cli/cmd/account/networks" o_auth_enrollment "github.com/databricks/cli/cmd/account/o-auth-enrollment" private_access "github.com/databricks/cli/cmd/account/private-access" @@ -32,63 +32,43 @@ import ( workspaces "github.com/databricks/cli/cmd/account/workspaces" ) -var accountCmd = &cobra.Command{ - Use: "account", - Short: `Databricks Account Commands`, -} - -func init() { - root.RootCmd.AddCommand(accountCmd) - - accountCmd.AddCommand(account_access_control.Cmd) - accountCmd.AddCommand(billable_usage.Cmd) - accountCmd.AddCommand(budgets.Cmd) - accountCmd.AddCommand(credentials.Cmd) - accountCmd.AddCommand(custom_app_integration.Cmd) - accountCmd.AddCommand(encryption_keys.Cmd) - accountCmd.AddCommand(account_groups.Cmd) - accountCmd.AddCommand(account_ip_access_lists.Cmd) - accountCmd.AddCommand(log_delivery.Cmd) - accountCmd.AddCommand(account_metastore_assignments.Cmd) - accountCmd.AddCommand(account_metastores.Cmd) - accountCmd.AddCommand(networks.Cmd) - accountCmd.AddCommand(o_auth_enrollment.Cmd) - accountCmd.AddCommand(private_access.Cmd) - accountCmd.AddCommand(published_app_integration.Cmd) - accountCmd.AddCommand(service_principal_secrets.Cmd) - accountCmd.AddCommand(account_service_principals.Cmd) - accountCmd.AddCommand(account_settings.Cmd) - accountCmd.AddCommand(storage.Cmd) - accountCmd.AddCommand(account_storage_credentials.Cmd) - accountCmd.AddCommand(account_users.Cmd) - accountCmd.AddCommand(vpc_endpoints.Cmd) - accountCmd.AddCommand(workspace_assignment.Cmd) - accountCmd.AddCommand(workspaces.Cmd) - - // Register commands with groups - account_access_control.Cmd.GroupID = "iam" - billable_usage.Cmd.GroupID = "billing" - budgets.Cmd.GroupID = "billing" - credentials.Cmd.GroupID = "provisioning" - custom_app_integration.Cmd.GroupID = "oauth2" - encryption_keys.Cmd.GroupID = "provisioning" - account_groups.Cmd.GroupID = "iam" - account_ip_access_lists.Cmd.GroupID = "settings" - log_delivery.Cmd.GroupID = "billing" - account_metastore_assignments.Cmd.GroupID = "catalog" - account_metastores.Cmd.GroupID = "catalog" - networks.Cmd.GroupID = "provisioning" - o_auth_enrollment.Cmd.GroupID = "oauth2" - private_access.Cmd.GroupID = "provisioning" - published_app_integration.Cmd.GroupID = "oauth2" - service_principal_secrets.Cmd.GroupID = "oauth2" - account_service_principals.Cmd.GroupID = "iam" - account_settings.Cmd.GroupID = "settings" - storage.Cmd.GroupID = "provisioning" - account_storage_credentials.Cmd.GroupID = "catalog" - 
account_users.Cmd.GroupID = "iam" - vpc_endpoints.Cmd.GroupID = "provisioning" - workspace_assignment.Cmd.GroupID = "iam" - workspaces.Cmd.GroupID = "provisioning" - +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "account", + Short: `Databricks Account Commands`, + } + + cmd.AddCommand(account_access_control.New()) + cmd.AddCommand(billable_usage.New()) + cmd.AddCommand(budgets.New()) + cmd.AddCommand(credentials.New()) + cmd.AddCommand(custom_app_integration.New()) + cmd.AddCommand(encryption_keys.New()) + cmd.AddCommand(account_groups.New()) + cmd.AddCommand(account_ip_access_lists.New()) + cmd.AddCommand(log_delivery.New()) + cmd.AddCommand(account_metastore_assignments.New()) + cmd.AddCommand(account_metastores.New()) + cmd.AddCommand(account_network_policy.New()) + cmd.AddCommand(networks.New()) + cmd.AddCommand(o_auth_enrollment.New()) + cmd.AddCommand(private_access.New()) + cmd.AddCommand(published_app_integration.New()) + cmd.AddCommand(service_principal_secrets.New()) + cmd.AddCommand(account_service_principals.New()) + cmd.AddCommand(account_settings.New()) + cmd.AddCommand(storage.New()) + cmd.AddCommand(account_storage_credentials.New()) + cmd.AddCommand(account_users.New()) + cmd.AddCommand(vpc_endpoints.New()) + cmd.AddCommand(workspace_assignment.New()) + cmd.AddCommand(workspaces.New()) + + // Register all groups with the parent command. + groups := Groups() + for i := range groups { + cmd.AddGroup(&groups[i]) + } + + return cmd } diff --git a/cmd/account/credentials/credentials.go b/cmd/account/credentials/credentials.go index 5a1362d15..99204bfbd 100755 --- a/cmd/account/credentials/credentials.go +++ b/cmd/account/credentials/credentials.go @@ -12,34 +12,54 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "credentials", - Short: `These APIs manage credential configurations for this workspace.`, - Long: `These APIs manage credential configurations for this workspace. Databricks +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "credentials", + Short: `These APIs manage credential configurations for this workspace.`, + Long: `These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the new workspace. A credential configuration encapsulates this role information, and its ID is used when creating a new workspace.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateCredentialRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *provisioning.CreateCredentialRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateCredentialRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create credential configuration.`, - Long: `Create credential configuration. + cmd.Use = "create" + cmd.Short = `Create credential configuration.` + cmd.Long = `Create credential configuration. Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set @@ -54,11 +74,12 @@ var createCmd = &cobra.Command{ For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html`, + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -76,33 +97,54 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteCredentialRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteCredentialRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteCredentialRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete CREDENTIALS_ID", - Short: `Delete credential configuration.`, - Long: `Delete credential configuration. + cmd.Use = "delete CREDENTIALS_ID" + cmd.Short = `Delete credential configuration.` + cmd.Long = `Delete credential configuration. Deletes a Databricks credential configuration object for an account, both specified by ID. 
You cannot delete a credential that is associated with any - workspace.`, + workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -130,32 +172,53 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetCredentialRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetCredentialRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetCredentialRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get CREDENTIALS_ID", - Short: `Get credential configuration.`, - Long: `Get credential configuration. + cmd.Use = "get CREDENTIALS_ID" + cmd.Short = `Get credential configuration.` + cmd.Long = `Get credential configuration. Gets a Databricks credential configuration object for an account, both - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -183,30 +246,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all credential configurations.`, - Long: `Get all credential configurations. + cmd.Use = "list" + cmd.Short = `Get all credential configurations.` + cmd.Long = `Get all credential configurations. 
Gets all Databricks credential configurations associated with an account - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Credentials.List(ctx) @@ -214,10 +295,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service Credentials diff --git a/cmd/account/credentials/overrides.go b/cmd/account/credentials/overrides.go index 505215055..9f1e6cb66 100644 --- a/cmd/account/credentials/overrides.go +++ b/cmd/account/credentials/overrides.go @@ -1,9 +1,16 @@ package credentials -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.CredentialsId | green}} {{.CredentialsName}} {{.AwsCredentials.StsRole.RoleArn}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/custom-app-integration/custom-app-integration.go b/cmd/account/custom-app-integration/custom-app-integration.go index 837ac5188..e58688095 100755 --- a/cmd/account/custom-app-integration/custom-app-integration.go +++ b/cmd/account/custom-app-integration/custom-app-integration.go @@ -12,48 +12,70 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "custom-app-integration", - Short: `These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`, - Long: `These APIs enable administrators to manage custom oauth app integrations, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "custom-app-integration", + Short: `These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`, + Long: `These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud. **Note:** You can only add/use the OAuth custom application integrations when OAuth enrollment status is enabled. For more details see :method:OAuthEnrollment/create`, - Annotations: map[string]string{ - "package": "oauth2", - }, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq oauth2.CreateCustomAppIntegration -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *oauth2.CreateCustomAppIntegration, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreateCustomAppIntegration + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Confidential, "confidential", createReq.Confidential, `indicates if an oauth client-secret should be generated.`) + cmd.Flags().BoolVar(&createReq.Confidential, "confidential", createReq.Confidential, `indicates if an oauth client-secret should be generated.`) + // TODO: array: scopes // TODO: complex arg: token_access_policy -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create Custom OAuth App Integration.`, - Long: `Create Custom OAuth App Integration. + cmd.Use = "create" + cmd.Short = `Create Custom OAuth App Integration.` + cmd.Long = `Create Custom OAuth App Integration. Create Custom OAuth App Integration. You can retrieve the custom oauth app integration via - :method:CustomAppIntegration/get.`, + :method:CustomAppIntegration/get.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -71,36 +93,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq oauth2.DeleteCustomAppIntegrationRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *oauth2.DeleteCustomAppIntegrationRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq oauth2.DeleteCustomAppIntegrationRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete INTEGRATION_ID", - Short: `Delete Custom OAuth App Integration.`, - Long: `Delete Custom OAuth App Integration. + cmd.Use = "delete INTEGRATION_ID" + cmd.Short = `Delete Custom OAuth App Integration.` + cmd.Long = `Delete Custom OAuth App Integration. Delete an existing Custom OAuth App Integration. 
You can retrieve the custom - oauth app integration via :method:CustomAppIntegration/get.`, + oauth app integration via :method:CustomAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -111,35 +155,57 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq oauth2.GetCustomAppIntegrationRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *oauth2.GetCustomAppIntegrationRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq oauth2.GetCustomAppIntegrationRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get INTEGRATION_ID", - Short: `Get OAuth Custom App Integration.`, - Long: `Get OAuth Custom App Integration. + cmd.Use = "get INTEGRATION_ID" + cmd.Short = `Get OAuth Custom App Integration.` + cmd.Long = `Get OAuth Custom App Integration. - Gets the Custom OAuth App Integration for the given integration id.`, + Gets the Custom OAuth App Integration for the given integration id.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -150,30 +216,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get custom oauth app integrations.`, - Long: `Get custom oauth app integrations. + cmd.Use = "list" + cmd.Short = `Get custom oauth app integrations.` + cmd.Long = `Get custom oauth app integrations. Get the list of custom oauth app integrations for the specified Databricks - account`, + account` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.CustomAppIntegration.ListAll(ctx) @@ -181,41 +265,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq oauth2.UpdateCustomAppIntegration -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *oauth2.UpdateCustomAppIntegration, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq oauth2.UpdateCustomAppIntegration + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: redirect_urls // TODO: complex arg: token_access_policy -} - -var updateCmd = &cobra.Command{ - Use: "update INTEGRATION_ID", - Short: `Updates Custom OAuth App Integration.`, - Long: `Updates Custom OAuth App Integration. + cmd.Use = "update INTEGRATION_ID" + cmd.Short = `Updates Custom OAuth App Integration.` + cmd.Long = `Updates Custom OAuth App Integration. Updates an existing custom OAuth App Integration. You can retrieve the custom - oauth app integration via :method:CustomAppIntegration/get.`, + oauth app integration via :method:CustomAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -232,10 +338,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service CustomAppIntegration diff --git a/cmd/account/encryption-keys/encryption-keys.go b/cmd/account/encryption-keys/encryption-keys.go index 0db4af80e..2172c49fc 100755 --- a/cmd/account/encryption-keys/encryption-keys.go +++ b/cmd/account/encryption-keys/encryption-keys.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "encryption-keys", - Short: `These APIs manage encryption key configurations for this workspace (optional).`, - Long: `These APIs manage encryption key configurations for this workspace (optional). +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "encryption-keys", + Short: `These APIs manage encryption key configurations for this workspace (optional).`, + Long: `These APIs manage encryption key configurations for this workspace (optional). A key configuration encapsulates the AWS KMS key information and some information about how the key configuration can be used. There are two possible uses for key configurations: @@ -31,29 +36,44 @@ var Cmd = &cobra.Command{ encryption requires that the workspace is on the E2 version of the platform. If you have an older workspace, it might not be on the E2 version of the platform. If you are not sure, contact your Databricks representative.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateCustomerManagedKeyRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateCustomerManagedKeyRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateCustomerManagedKeyRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_key_info // TODO: complex arg: gcp_key_info -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create encryption key configuration.`, - Long: `Create encryption key configuration. + cmd.Use = "create" + cmd.Short = `Create encryption key configuration.` + cmd.Long = `Create encryption key configuration. Creates a customer-managed key configuration object for an account, specified by ID. 
This operation uploads a reference to a customer-managed key to @@ -71,11 +91,12 @@ var createCmd = &cobra.Command{ This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -93,36 +114,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteEncryptionKeyRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteEncryptionKeyRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteEncryptionKeyRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete CUSTOMER_MANAGED_KEY_ID", - Short: `Delete encryption key configuration.`, - Long: `Delete encryption key configuration. + cmd.Use = "delete CUSTOMER_MANAGED_KEY_ID" + cmd.Short = `Delete encryption key configuration.` + cmd.Long = `Delete encryption key configuration. Deletes a customer-managed key configuration object for an account. You cannot - delete a configuration that is associated with a running workspace.`, + delete a configuration that is associated with a running workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -133,25 +176,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetEncryptionKeyRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetEncryptionKeyRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetEncryptionKeyRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get CUSTOMER_MANAGED_KEY_ID", - Short: `Get encryption key configuration.`, - Long: `Get encryption key configuration. + cmd.Use = "get CUSTOMER_MANAGED_KEY_ID" + cmd.Short = `Get encryption key configuration.` + cmd.Long = `Get encryption key configuration. Gets a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to @@ -167,15 +230,17 @@ var getCmd = &cobra.Command{ types, subscription types, and AWS regions. This operation is available only if your account is on the E2 version of the - platform.",`, + platform.",` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -186,23 +251,40 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all encryption key configurations.`, - Long: `Get all encryption key configurations. + cmd.Use = "list" + cmd.Short = `Get all encryption key configurations.` + cmd.Long = `Get all encryption key configurations. Gets all customer-managed key configuration objects for an account. If the key is specified as a workspace's managed services customer-managed key, @@ -216,11 +298,12 @@ var listCmd = &cobra.Command{ types, subscription types, and AWS regions. This operation is available only if your account is on the E2 version of the - platform.`, + platform.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.EncryptionKeys.List(ctx) @@ -228,10 +311,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service EncryptionKeys diff --git a/cmd/account/encryption-keys/overrides.go b/cmd/account/encryption-keys/overrides.go index 9a27ac00d..906211750 100644 --- a/cmd/account/encryption-keys/overrides.go +++ b/cmd/account/encryption-keys/overrides.go @@ -1,9 +1,16 @@ package encryption_keys -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.CustomerManagedKeyId | green}} {{range .UseCases}}{{.}} {{end}} {{.AwsKeyInfo.KeyArn}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/groups.go b/cmd/account/groups.go index 7c9d70e3d..10a795b03 100644 --- a/cmd/account/groups.go +++ b/cmd/account/groups.go @@ -32,11 +32,3 @@ func Groups() []cobra.Group { }, } } - -func init() { - // Register groups with parent command - groups := Groups() - for i := range groups { - accountCmd.AddGroup(&groups[i]) - } -} diff --git a/cmd/account/groups/groups.go b/cmd/account/groups/groups.go index 5897ef292..6e3b98c00 100755 --- a/cmd/account/groups/groups.go +++ b/cmd/account/groups/groups.go @@ -12,59 +12,81 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "groups", - Short: `Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.`, - Long: `Groups simplify identity management, making it easier to assign access to +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "groups", + Short: `Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.`, + Long: `Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects. It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks account identities can be assigned as members of groups, and members inherit permissions that are assigned to their group.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.Group -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.Group, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.Group + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) // TODO: array: members // TODO: complex arg: meta // TODO: array: roles -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new group.`, - Long: `Create a new group. + cmd.Use = "create" + cmd.Short = `Create a new group.` + cmd.Long = `Create a new group. Creates a group in the Databricks account with a unique name, using the - supplied group details.`, + supplied group details.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -81,31 +103,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteAccountGroupRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountGroupRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountGroupRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a group.`, - Long: `Delete a group. + cmd.Use = "delete ID" + cmd.Short = `Delete a group.` + cmd.Long = `Delete a group. 
- Deletes a group from the Databricks account.`, + Deletes a group from the Databricks account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -133,31 +176,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetAccountGroupRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetAccountGroupRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountGroupRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get group details.`, - Long: `Get group details. + cmd.Use = "get ID" + cmd.Short = `Get group details.` + cmd.Long = `Get group details. - Gets the information for a specific group in the Databricks account.`, + Gets the information for a specific group in the Databricks account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -185,48 +249,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListAccountGroupsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *iam.ListAccountGroupsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountGroupsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List group details.`, - Long: `List group details. + cmd.Use = "list" + cmd.Short = `List group details.` + cmd.Long = `List group details. - Gets all details of the groups associated with the Databricks account.`, + Gets all details of the groups associated with the Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -243,35 +329,57 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schemas -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update group details.`, - Long: `Update group details. + cmd.Use = "patch ID" + cmd.Short = `Update group details.` + cmd.Long = `Update group details. - Partially updates the details of a group.`, + Partially updates the details of a group.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -305,42 +413,63 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.Group -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.Group, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.Group + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks group ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks group ID.`) // TODO: array: members // TODO: complex arg: meta // TODO: array: roles -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace a group.`, - Long: `Replace a group. + cmd.Use = "update ID" + cmd.Short = `Replace a group.` + cmd.Long = `Replace a group. - Updates the details of a group by replacing the entire group entity.`, + Updates the details of a group by replacing the entire group entity.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -375,10 +504,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountGroups diff --git a/cmd/account/groups/overrides.go b/cmd/account/groups/overrides.go index 28c91c4d2..37d05c64f 100644 --- a/cmd/account/groups/overrides.go +++ b/cmd/account/groups/overrides.go @@ -1,10 +1,18 @@ package groups -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListAccountGroupsRequest) { listReq.Attributes = "id,displayName" listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.DisplayName}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/ip-access-lists/ip-access-lists.go b/cmd/account/ip-access-lists/ip-access-lists.go index 7f43ff2a7..328883ac3 100755 --- a/cmd/account/ip-access-lists/ip-access-lists.go +++ b/cmd/account/ip-access-lists/ip-access-lists.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "ip-access-lists", - Short: `The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.`, - Long: `The Accounts IP Access List API enables account admins to configure IP access +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "ip-access-lists", + Short: `The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.`, + Long: `The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console. Account IP Access Lists affect web application access and REST API access to @@ -37,26 +42,41 @@ var Cmd = &cobra.Command{ After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq settings.CreateIpAccessList -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *settings.CreateIpAccessList, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq settings.CreateIpAccessList + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create access list.`, - Long: `Create access list. + cmd.Use = "create" + cmd.Short = `Create access list.` + cmd.Long = `Create access list. 
Creates an IP access list for the account. @@ -71,11 +91,12 @@ var createCmd = &cobra.Command{ * If the new list would block the calling user's current IP, error 400 is returned with error_code value INVALID_STATE. - It can take a few minutes for the changes to take effect.`, + It can take a few minutes for the changes to take effect.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -93,31 +114,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq settings.DeleteAccountIpAccessListRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.DeleteAccountIpAccessListRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteAccountIpAccessListRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete IP_ACCESS_LIST_ID", - Short: `Delete access list.`, - Long: `Delete access list. + cmd.Use = "delete IP_ACCESS_LIST_ID" + cmd.Short = `Delete access list.` + cmd.Long = `Delete access list. - Deletes an IP access list, specified by its list ID.`, + Deletes an IP access list, specified by its list ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -145,31 +187,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq settings.GetAccountIpAccessListRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *settings.GetAccountIpAccessListRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq settings.GetAccountIpAccessListRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get IP_ACCESS_LIST_ID", - Short: `Get IP access list.`, - Long: `Get IP access list. + cmd.Use = "get IP_ACCESS_LIST_ID" + cmd.Short = `Get IP access list.` + cmd.Long = `Get IP access list. - Gets an IP access list, specified by its list ID.`, + Gets an IP access list, specified by its list ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -197,29 +260,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get access lists.`, - Long: `Get access lists. + cmd.Use = "list" + cmd.Short = `Get access lists.` + cmd.Long = `Get access lists. - Gets all IP access lists for the specified account.`, + Gets all IP access lists for the specified account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.IpAccessLists.ListAll(ctx) @@ -227,29 +308,49 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start replace command -var replaceReq settings.ReplaceIpAccessList -var replaceJson flags.JsonFlag -func init() { - Cmd.AddCommand(replaceCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
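The same pattern applies one level up: each service package also declares a package-wide `cmdOverrides []func(*cobra.Command)` slice, and `New()` runs those functions against the parent group command before returning it. A purely illustrative sketch of a curated file for this package (the package name follows the directory-to-underscore convention seen elsewhere in this diff, and the alias is an assumption, not part of this change):

    package ip_access_lists

    import "github.com/spf13/cobra"

    func init() {
    	cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
    		// Hypothetical tweak: expose a shorter alias for the command group.
    		cmd.Aliases = []string{"acl"}
    	})
    }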
+var replaceOverrides []func( + *cobra.Command, + *settings.ReplaceIpAccessList, +) + +func newReplace() *cobra.Command { + cmd := &cobra.Command{} + + var replaceReq settings.ReplaceIpAccessList + var replaceJson flags.JsonFlag + // TODO: short flags - replaceCmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) - replaceCmd.Flags().StringVar(&replaceReq.ListId, "list-id", replaceReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) + cmd.Flags().StringVar(&replaceReq.ListId, "list-id", replaceReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) -} - -var replaceCmd = &cobra.Command{ - Use: "replace", - Short: `Replace access list.`, - Long: `Replace access list. + cmd.Use = "replace" + cmd.Short = `Replace access list.` + cmd.Long = `Replace access list. Replaces an IP access list, specified by its ID. @@ -260,11 +361,12 @@ var replaceCmd = &cobra.Command{ counts as a single value. Attempts to exceed that number return error 400 with error_code value QUOTA_EXCEEDED. * If the resulting list would block the calling user's current IP, error 400 is returned with error_code value - INVALID_STATE. It can take a few minutes for the changes to take effect.`, + INVALID_STATE. It can take a few minutes for the changes to take effect.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -282,29 +384,49 @@ var replaceCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range replaceOverrides { + fn(cmd, &replaceReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReplace()) + }) } // start update command -var updateReq settings.UpdateIpAccessList -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *settings.UpdateIpAccessList, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq settings.UpdateIpAccessList + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.ListId, "list-id", updateReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) + cmd.Flags().StringVar(&updateReq.ListId, "list-id", updateReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Update access list.`, - Long: `Update access list. 
+ cmd.Use = "update" + cmd.Short = `Update access list.` + cmd.Long = `Update access list. Updates an existing IP access list, specified by its ID. @@ -319,11 +441,12 @@ var updateCmd = &cobra.Command{ * If the updated list would block the calling user's current IP, error 400 is returned with error_code value INVALID_STATE. - It can take a few minutes for the changes to take effect.`, + It can take a few minutes for the changes to take effect.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -341,10 +464,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountIpAccessLists diff --git a/cmd/account/log-delivery/log-delivery.go b/cmd/account/log-delivery/log-delivery.go index d5ae87b1b..6323e0ddd 100755 --- a/cmd/account/log-delivery/log-delivery.go +++ b/cmd/account/log-delivery/log-delivery.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "log-delivery", - Short: `These APIs manage log delivery configurations for this account.`, - Long: `These APIs manage log delivery configurations for this account. The two +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "log-delivery", + Short: `These APIs manage log delivery configurations for this account.`, + Long: `These APIs manage log delivery configurations for this account. The two supported log types for this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all account ID types. @@ -29,22 +34,21 @@ var Cmd = &cobra.Command{ 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using Databricks APIs, call the Account API to create a - [storage configuration object](#operation/create-storage-config) that uses the - bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM - role. For full details, including the required IAM role policies and trust + [storage configuration object](:method:Storage/Create) that uses the bucket + name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. + For full details, including the required IAM role policies and trust relationship, see [Billable usage log delivery]. Using Databricks APIs, call the Account API to create a [credential configuration - object](#operation/create-credential-config) that uses the IAM role's ARN. 3. 
- **Create log delivery configuration**: Using Databricks APIs, call the Account - API to [create a log delivery - configuration](#operation/create-log-delivery-config) that uses the credential - and storage configuration objects from previous steps. You can specify if the - logs should include all events of that log type in your account (_Account - level_ delivery) or only events for a specific set of workspaces (_workspace - level_ delivery). Account level log delivery applies to all current and future - workspaces plus account level logs, while workspace level log delivery solely - delivers logs related to the specified workspaces. You can create multiple - types of delivery configurations per account. + object](:method:Credentials/Create) that uses the IAM role"s ARN. 3. **Create + log delivery configuration**: Using Databricks APIs, call the Account API to + [create a log delivery configuration](:method:LogDelivery/Create) that uses + the credential and storage configuration objects from previous steps. You can + specify if the logs should include all events of that log type in your account + (_Account level_ delivery) or only events for a specific set of workspaces + (_workspace level_ delivery). Account level log delivery applies to all + current and future workspaces plus account level logs, while workspace level + log delivery solely delivers logs related to the specified workspaces. You can + create multiple types of delivery configurations per account. For billable usage delivery: * For more information about billable usage logs, see [Billable usage log delivery]. For the CSV schema, see the [Usage page]. * @@ -75,35 +79,49 @@ var Cmd = &cobra.Command{ [Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html`, - Annotations: map[string]string{ - "package": "billing", - }, + GroupID: "billing", + Annotations: map[string]string{ + "package": "billing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq billing.WrappedCreateLogDeliveryConfiguration -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *billing.WrappedCreateLogDeliveryConfiguration, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq billing.WrappedCreateLogDeliveryConfiguration + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: log_delivery_configuration -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new log delivery configuration.`, - Long: `Create a new log delivery configuration. + cmd.Use = "create" + cmd.Short = `Create a new log delivery configuration.` + cmd.Long = `Create a new log delivery configuration. 
Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you - already created a [credential object](#operation/create-credential-config) - (which encapsulates a cross-account service IAM role) and a [storage - configuration object](#operation/create-storage-config) (which encapsulates an - S3 bucket). + already created a [credential object](:method:Credentials/Create) (which + encapsulates a cross-account service IAM role) and a [storage configuration + object](:method:Storage/Create) (which encapsulates an S3 bucket). For full details, including the required IAM role policies and bucket policies, see [Deliver and access billable usage logs] or [Configure audit @@ -120,21 +138,23 @@ var createCmd = &cobra.Command{ You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log delivery - configuration](#operation/patch-log-delivery-config-status)). + configuration](:method:LogDelivery/PatchStatus)). [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html`, + [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -151,32 +171,53 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start get command -var getReq billing.GetLogDeliveryRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *billing.GetLogDeliveryRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq billing.GetLogDeliveryRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get LOG_DELIVERY_CONFIGURATION_ID", - Short: `Get log delivery configuration.`, - Long: `Get log delivery configuration. + cmd.Use = "get LOG_DELIVERY_CONFIGURATION_ID" + cmd.Short = `Get log delivery configuration.` + cmd.Long = `Get log delivery configuration. 
Gets a Databricks log delivery configuration object for an account, both - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -204,45 +245,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq billing.ListLogDeliveryRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *billing.ListLogDeliveryRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq billing.ListLogDeliveryRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.CredentialsId, "credentials-id", listReq.CredentialsId, `Filter by credential configuration ID.`) - listCmd.Flags().Var(&listReq.Status, "status", `Filter by status ENABLED or DISABLED.`) - listCmd.Flags().StringVar(&listReq.StorageConfigurationId, "storage-configuration-id", listReq.StorageConfigurationId, `Filter by storage configuration ID.`) + cmd.Flags().StringVar(&listReq.CredentialsId, "credentials-id", listReq.CredentialsId, `Filter by credential configuration ID.`) + cmd.Flags().Var(&listReq.Status, "status", `Filter by status ENABLED or DISABLED.`) + cmd.Flags().StringVar(&listReq.StorageConfigurationId, "storage-configuration-id", listReq.StorageConfigurationId, `Filter by storage configuration ID.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all log delivery configurations.`, - Long: `Get all log delivery configurations. + cmd.Use = "list" + cmd.Short = `Get all log delivery configurations.` + cmd.Long = `Get all log delivery configurations. 
Gets all Databricks log delivery configurations associated with an account - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -259,39 +322,61 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch-status command -var patchStatusReq billing.UpdateLogDeliveryConfigurationStatusRequest -func init() { - Cmd.AddCommand(patchStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchStatusOverrides []func( + *cobra.Command, + *billing.UpdateLogDeliveryConfigurationStatusRequest, +) + +func newPatchStatus() *cobra.Command { + cmd := &cobra.Command{} + + var patchStatusReq billing.UpdateLogDeliveryConfigurationStatusRequest + // TODO: short flags -} - -var patchStatusCmd = &cobra.Command{ - Use: "patch-status STATUS LOG_DELIVERY_CONFIGURATION_ID", - Short: `Enable or disable log delivery configuration.`, - Long: `Enable or disable log delivery configuration. + cmd.Use = "patch-status STATUS LOG_DELIVERY_CONFIGURATION_ID" + cmd.Short = `Enable or disable log delivery configuration.` + cmd.Long = `Enable or disable log delivery configuration. Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under - [Create log delivery](#operation/create-log-delivery-config).`, + [Create log delivery](:method:LogDelivery/Create).` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -306,10 +391,24 @@ var patchStatusCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range patchStatusOverrides { + fn(cmd, &patchStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatchStatus()) + }) } // end service LogDelivery diff --git a/cmd/account/metastore-assignments/metastore-assignments.go b/cmd/account/metastore-assignments/metastore-assignments.go index 673bb8f4c..24c4eb699 100755 --- a/cmd/account/metastore-assignments/metastore-assignments.go +++ b/cmd/account/metastore-assignments/metastore-assignments.go @@ -12,43 +12,64 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "metastore-assignments", - Short: `These APIs manage metastore assignments to a workspace.`, - Long: `These APIs manage metastore assignments to a workspace.`, - Annotations: map[string]string{ - "package": "catalog", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "metastore-assignments", + Short: `These APIs manage metastore assignments to a workspace.`, + Long: `These APIs manage metastore assignments to a workspace.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.AccountsCreateMetastoreAssignment -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.AccountsCreateMetastoreAssignment, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.AccountsCreateMetastoreAssignment + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: metastore_assignment -} - -var createCmd = &cobra.Command{ - Use: "create WORKSPACE_ID METASTORE_ID", - Short: `Assigns a workspace to a metastore.`, - Long: `Assigns a workspace to a metastore. + cmd.Use = "create WORKSPACE_ID METASTORE_ID" + cmd.Short = `Assigns a workspace to a metastore.` + cmd.Long = `Assigns a workspace to a metastore. 
- Creates an assignment to a metastore for a workspace Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + Creates an assignment to a metastore for a workspace` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -64,42 +85,63 @@ var createCmd = &cobra.Command{ } createReq.MetastoreId = args[1] - response, err := a.MetastoreAssignments.Create(ctx, createReq) + err = a.MetastoreAssignments.Create(ctx, createReq) if err != nil { return err } - return cmdio.Render(ctx, response) - }, + return nil + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteAccountMetastoreAssignmentRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteAccountMetastoreAssignmentRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteAccountMetastoreAssignmentRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete WORKSPACE_ID METASTORE_ID", - Short: `Delete a metastore assignment.`, - Long: `Delete a metastore assignment. + cmd.Use = "delete WORKSPACE_ID METASTORE_ID" + cmd.Short = `Delete a metastore assignment.` + cmd.Long = `Delete a metastore assignment. Deletes a metastore assignment to a workspace, leaving the workspace with no - metastore. Please add a header X-Databricks-Account-Console-API-Version: 2.0 - to access this API.`, + metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -114,39 +156,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetAccountMetastoreAssignmentRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetAccountMetastoreAssignmentRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetAccountMetastoreAssignmentRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get WORKSPACE_ID", - Short: `Gets the metastore assignment for a workspace.`, - Long: `Gets the metastore assignment for a workspace. + cmd.Use = "get WORKSPACE_ID" + cmd.Short = `Gets the metastore assignment for a workspace.` + cmd.Long = `Gets the metastore assignment for a workspace. Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment will not be found and a - 404 returned. Please add a header X-Databricks-Account-Console-API-Version: - 2.0 to access this API.`, + 404 returned.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -160,37 +223,58 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListAccountMetastoreAssignmentsRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListAccountMetastoreAssignmentsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListAccountMetastoreAssignmentsRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list METASTORE_ID", - Short: `Get all workspaces assigned to a metastore.`, - Long: `Get all workspaces assigned to a metastore. + cmd.Use = "list METASTORE_ID" + cmd.Short = `Get all workspaces assigned to a metastore.` + cmd.Long = `Get all workspaces assigned to a metastore. Gets a list of all Databricks workspace IDs that have been assigned to given - metastore. 
Please add a header X-Databricks-Account-Console-API-Version: 2.0 - to access this API`, + metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -201,41 +285,62 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.AccountsUpdateMetastoreAssignment -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.AccountsUpdateMetastoreAssignment, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.AccountsUpdateMetastoreAssignment + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: metastore_assignment -} - -var updateCmd = &cobra.Command{ - Use: "update WORKSPACE_ID METASTORE_ID", - Short: `Updates a metastore assignment to a workspaces.`, - Long: `Updates a metastore assignment to a workspaces. + cmd.Use = "update WORKSPACE_ID METASTORE_ID" + cmd.Short = `Updates a metastore assignment to a workspaces.` + cmd.Long = `Updates a metastore assignment to a workspaces. Updates an assignment to a metastore for a workspace. Currently, only the - default catalog may be updated. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + default catalog may be updated.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -256,10 +361,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountMetastoreAssignments diff --git a/cmd/account/metastores/metastores.go b/cmd/account/metastores/metastores.go index decbb4060..726b779d8 100755 --- a/cmd/account/metastores/metastores.go +++ b/cmd/account/metastores/metastores.go @@ -10,47 +10,68 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "metastores", - Short: `These APIs manage Unity Catalog metastores for an account.`, - Long: `These APIs manage Unity Catalog metastores for an account. A metastore +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "metastores", + Short: `These APIs manage Unity Catalog metastores for an account.`, + Long: `These APIs manage Unity Catalog metastores for an account. A metastore contains catalogs that can be associated with workspaces`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.AccountsCreateMetastore -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.AccountsCreateMetastore, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.AccountsCreateMetastore + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: metastore_info -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create metastore.`, - Long: `Create metastore. + cmd.Use = "create" + cmd.Short = `Create metastore.` + cmd.Long = `Create metastore. - Creates a Unity Catalog metastore. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + Creates a Unity Catalog metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -67,36 +88,59 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteAccountMetastoreRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteAccountMetastoreRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteAccountMetastoreRequest + // TODO: short flags -} + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the metastore is not empty.`) -var deleteCmd = &cobra.Command{ - Use: "delete METASTORE_ID", - Short: `Delete a metastore.`, - Long: `Delete a metastore. + cmd.Use = "delete METASTORE_ID" + cmd.Short = `Delete a metastore.` + cmd.Long = `Delete a metastore. - Deletes a Unity Catalog metastore for an account, both specified by ID. Please - add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + Deletes a Unity Catalog metastore for an account, both specified by ID.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -107,36 +151,57 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetAccountMetastoreRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetAccountMetastoreRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetAccountMetastoreRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get METASTORE_ID", - Short: `Get a metastore.`, - Long: `Get a metastore. + cmd.Use = "get METASTORE_ID" + cmd.Short = `Get a metastore.` + cmd.Long = `Get a metastore. - Gets a Unity Catalog metastore from an account, both specified by ID. 
Please - add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + Gets a Unity Catalog metastore from an account, both specified by ID.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -147,72 +212,109 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all metastores associated with an account.`, - Long: `Get all metastores associated with an account. + cmd.Use = "list" + cmd.Short = `Get all metastores associated with an account.` + cmd.Long = `Get all metastores associated with an account. - Gets all Unity Catalog metastores associated with an account specified by ID. - Please add a header X-Databricks-Account-Console-API-Version: 2.0 to access - this API.`, + Gets all Unity Catalog metastores associated with an account specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - response, err := a.Metastores.List(ctx) + response, err := a.Metastores.ListAll(ctx) if err != nil { return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.AccountsUpdateMetastore -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
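Illustrative aside (nothing below is part of the generated output): a manually curated `overrides.go` in this directory could append to the `updateOverrides` slice declared just below, following the same pattern this change introduces in `cmd/account/networks/overrides.go`. The override body is a minimal sketch and the adjusted help text is a made-up example.

    package metastores

    import (
        "github.com/databricks/databricks-sdk-go/service/catalog"
        "github.com/spf13/cobra"
    )

    func updateOverride(updateCmd *cobra.Command, updateReq *catalog.AccountsUpdateMetastore) {
        // Illustrative tweak only: replace the one-line help text for `update`.
        updateCmd.Short = `Update an account-level Unity Catalog metastore.`
    }

    func init() {
        updateOverrides = append(updateOverrides, updateOverride)
    }
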
+var updateOverrides []func( + *cobra.Command, + *catalog.AccountsUpdateMetastore, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.AccountsUpdateMetastore + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: metastore_info -} - -var updateCmd = &cobra.Command{ - Use: "update METASTORE_ID", - Short: `Update a metastore.`, - Long: `Update a metastore. + cmd.Use = "update METASTORE_ID" + cmd.Short = `Update a metastore.` + cmd.Long = `Update a metastore. - Updates an existing Unity Catalog metastore. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + Updates an existing Unity Catalog metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -229,10 +331,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountMetastores diff --git a/cmd/account/network-policy/network-policy.go b/cmd/account/network-policy/network-policy.go new file mode 100755 index 000000000..60db933ab --- /dev/null +++ b/cmd/account/network-policy/network-policy.go @@ -0,0 +1,243 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package network_policy + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/settings" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "network-policy", + Short: `Network policy is a set of rules that defines what can be accessed from your Databricks network.`, + Long: `Network policy is a set of rules that defines what can be accessed from your + Databricks network. E.g.: You can choose to block your SQL UDF to access + internet from your Databricks serverless clusters. + + There is only one instance of this setting per account. Since this setting has + a default value, this setting is present on all accounts even though it's + never set on a given account. 
Deletion reverts the value of the setting back + to the default value.`, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start delete-account-network-policy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteAccountNetworkPolicyOverrides []func( + *cobra.Command, + *settings.DeleteAccountNetworkPolicyRequest, +) + +func newDeleteAccountNetworkPolicy() *cobra.Command { + cmd := &cobra.Command{} + + var deleteAccountNetworkPolicyReq settings.DeleteAccountNetworkPolicyRequest + + // TODO: short flags + + cmd.Use = "delete-account-network-policy ETAG" + cmd.Short = `Delete Account Network Policy.` + cmd.Long = `Delete Account Network Policy. + + Reverts back all the account network policies back to default.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := root.AccountClient(ctx) + + deleteAccountNetworkPolicyReq.Etag = args[0] + + response, err := a.NetworkPolicy.DeleteAccountNetworkPolicy(ctx, deleteAccountNetworkPolicyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteAccountNetworkPolicyOverrides { + fn(cmd, &deleteAccountNetworkPolicyReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteAccountNetworkPolicy()) + }) +} + +// start read-account-network-policy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var readAccountNetworkPolicyOverrides []func( + *cobra.Command, + *settings.ReadAccountNetworkPolicyRequest, +) + +func newReadAccountNetworkPolicy() *cobra.Command { + cmd := &cobra.Command{} + + var readAccountNetworkPolicyReq settings.ReadAccountNetworkPolicyRequest + + // TODO: short flags + + cmd.Use = "read-account-network-policy ETAG" + cmd.Short = `Get Account Network Policy.` + cmd.Long = `Get Account Network Policy. + + Gets the value of Account level Network Policy.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := root.AccountClient(ctx) + + readAccountNetworkPolicyReq.Etag = args[0] + + response, err := a.NetworkPolicy.ReadAccountNetworkPolicy(ctx, readAccountNetworkPolicyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range readAccountNetworkPolicyOverrides { + fn(cmd, &readAccountNetworkPolicyReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReadAccountNetworkPolicy()) + }) +} + +// start update-account-network-policy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateAccountNetworkPolicyOverrides []func( + *cobra.Command, + *settings.UpdateAccountNetworkPolicyRequest, +) + +func newUpdateAccountNetworkPolicy() *cobra.Command { + cmd := &cobra.Command{} + + var updateAccountNetworkPolicyReq settings.UpdateAccountNetworkPolicyRequest + var updateAccountNetworkPolicyJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateAccountNetworkPolicyJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&updateAccountNetworkPolicyReq.AllowMissing, "allow-missing", updateAccountNetworkPolicyReq.AllowMissing, `This should always be set to true for Settings RPCs.`) + // TODO: complex arg: setting + + cmd.Use = "update-account-network-policy" + cmd.Short = `Update Account Network Policy.` + cmd.Long = `Update Account Network Policy. + + Updates the policy content of Account level Network Policy.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := root.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateAccountNetworkPolicyJson.Unmarshal(&updateAccountNetworkPolicyReq) + if err != nil { + return err + } + } else { + } + + response, err := a.NetworkPolicy.UpdateAccountNetworkPolicy(ctx, updateAccountNetworkPolicyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateAccountNetworkPolicyOverrides { + fn(cmd, &updateAccountNetworkPolicyReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateAccountNetworkPolicy()) + }) +} + +// end service AccountNetworkPolicy diff --git a/cmd/account/networks/networks.go b/cmd/account/networks/networks.go index 331f0869a..f481ffdbd 100755 --- a/cmd/account/networks/networks.go +++ b/cmd/account/networks/networks.go @@ -12,52 +12,74 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "networks", - Short: `These APIs manage network configurations for customer-managed VPCs (optional).`, - Long: `These APIs manage network configurations for customer-managed VPCs (optional). +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
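As a hypothetical sketch only: the group-level `cmdOverrides` slice declared just below is the hook that the per-command `init()` functions in this file append to, and a curated addition to the existing `cmd/account/networks/overrides.go` could use it the same way. The alias is invented for illustration.

    func init() {
        cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
            // Hypothetical: also accept `databricks account network ...`.
            cmd.Aliases = append(cmd.Aliases, "network")
        })
    }
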
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "networks", + Short: `These APIs manage network configurations for customer-managed VPCs (optional).`, + Long: `These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used when creating a new workspace if you use customer-managed VPCs.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateNetworkRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateNetworkRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateNetworkRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: gcp_network_info // TODO: array: security_group_ids // TODO: array: subnet_ids // TODO: complex arg: vpc_endpoints - createCmd.Flags().StringVar(&createReq.VpcId, "vpc-id", createReq.VpcId, `The ID of the VPC associated with this network.`) + cmd.Flags().StringVar(&createReq.VpcId, "vpc-id", createReq.VpcId, `The ID of the VPC associated with this network.`) -} - -var createCmd = &cobra.Command{ - Use: "create NETWORK_NAME", - Short: `Create network configuration.`, - Long: `Create network configuration. + cmd.Use = "create NETWORK_NAME" + cmd.Short = `Create network configuration.` + cmd.Long = `Create network configuration. Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be used for new Databricks clusters. This requires a - pre-existing VPC and subnets.`, + pre-existing VPC and subnets.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -75,36 +97,57 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteNetworkRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteNetworkRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteNetworkRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NETWORK_ID", - Short: `Delete a network configuration.`, - Long: `Delete a network configuration. + cmd.Use = "delete NETWORK_ID" + cmd.Short = `Delete a network configuration.` + cmd.Long = `Delete a network configuration. Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. This operation is available only if your account is on the E2 version of the - platform.`, + platform.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -132,32 +175,53 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetNetworkRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetNetworkRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetNetworkRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NETWORK_ID", - Short: `Get a network configuration.`, - Long: `Get a network configuration. + cmd.Use = "get NETWORK_ID" + cmd.Short = `Get a network configuration.` + cmd.Long = `Get a network configuration. Gets a Databricks network configuration, which represents a cloud VPC and its - resources.`, + resources.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -185,33 +249,51 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all network configurations.`, - Long: `Get all network configurations. + cmd.Use = "list" + cmd.Short = `Get all network configurations.` + cmd.Long = `Get all network configurations. Gets a list of all Databricks network configurations for an account, specified by ID. This operation is available only if your account is on the E2 version of the - platform.`, + platform.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Networks.List(ctx) @@ -219,10 +301,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service Networks diff --git a/cmd/account/networks/overrides.go b/cmd/account/networks/overrides.go index d47b9ce38..082ee242d 100644 --- a/cmd/account/networks/overrides.go +++ b/cmd/account/networks/overrides.go @@ -1,9 +1,16 @@ package networks -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.NetworkId | green}} {{.NetworkName}} {{.WorkspaceId}} {{.VpcStatus}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/o-auth-enrollment/o-auth-enrollment.go b/cmd/account/o-auth-enrollment/o-auth-enrollment.go index a39306a37..91fdfa0a7 100755 --- a/cmd/account/o-auth-enrollment/o-auth-enrollment.go +++ b/cmd/account/o-auth-enrollment/o-auth-enrollment.go @@ -10,36 +10,56 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "o-auth-enrollment", - Short: `These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using any OAuth published/custom application integration.`, - Long: `These APIs enable administrators to enroll OAuth for their accounts, which is +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
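A side note on the constructor style, sketched here as an assumption rather than anything in this change: because `New()` returns a plain `*cobra.Command`, the group can be built and inspected in isolation, for example from a package test.

    package o_auth_enrollment

    import "testing"

    func TestNewRegistersGeneratedSubcommands(t *testing.T) {
        // The init() hooks in this file append the generated `create` and `get`
        // constructors to cmdOverrides, and New() applies them before returning.
        cmd := New()
        if len(cmd.Commands()) == 0 {
            t.Fatal("expected generated subcommands to be registered")
        }
    }
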
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "o-auth-enrollment", + Short: `These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using any OAuth published/custom application integration.`, + Long: `These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using any OAuth published/custom application integration. **Note:** Your account must be on the E2 version to use these APIs, this is because OAuth is only supported on the E2 version.`, - Annotations: map[string]string{ - "package": "oauth2", - }, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq oauth2.CreateOAuthEnrollment -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *oauth2.CreateOAuthEnrollment, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreateOAuthEnrollment + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.EnableAllPublishedApps, "enable-all-published-apps", createReq.EnableAllPublishedApps, `If true, enable OAuth for all the published applications in the account.`) + cmd.Flags().BoolVar(&createReq.EnableAllPublishedApps, "enable-all-published-apps", createReq.EnableAllPublishedApps, `If true, enable OAuth for all the published applications in the account.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create OAuth Enrollment request.`, - Long: `Create OAuth Enrollment request. + cmd.Use = "create" + cmd.Short = `Create OAuth Enrollment request.` + cmd.Long = `Create OAuth Enrollment request. Create an OAuth Enrollment request to enroll OAuth for this account and optionally enable the OAuth integration for all the partner applications in @@ -49,18 +69,20 @@ var createCmd = &cobra.Command{ The enrollment is executed asynchronously, so the API will return 204 immediately. The actual enrollment take a few minutes, you can check the - status via API :method:OAuthEnrollment/get.`, + status via API :method:OAuthEnrollment/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -77,32 +99,50 @@ var createCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start get command -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, +) -} +func newGet() *cobra.Command { + cmd := &cobra.Command{} -var getCmd = &cobra.Command{ - Use: "get", - Short: `Get OAuth enrollment status.`, - Long: `Get OAuth enrollment status. + cmd.Use = "get" + cmd.Short = `Get OAuth enrollment status.` + cmd.Long = `Get OAuth enrollment status. Gets the OAuth enrollment status for this Account. You can only add/use the OAuth published/custom application integrations when - OAuth enrollment status is enabled.`, + OAuth enrollment status is enabled.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.OAuthEnrollment.Get(ctx) @@ -110,10 +150,24 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // end service OAuthEnrollment diff --git a/cmd/account/o-auth-enrollment/overrides.go b/cmd/account/o-auth-enrollment/overrides.go new file mode 100644 index 000000000..1fc3aacc1 --- /dev/null +++ b/cmd/account/o-auth-enrollment/overrides.go @@ -0,0 +1,107 @@ +package o_auth_enrollment + +import ( + "context" + "fmt" + "time" + + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/retries" + "github.com/databricks/databricks-sdk-go/service/oauth2" + "github.com/spf13/cobra" +) + +func promptForBasicAccountConfig(ctx context.Context) (*databricks.Config, error) { + if !cmdio.IsInTTY(ctx) { + return nil, fmt.Errorf("this command requires a TTY") + } + // OAuth Enrollment only works on AWS + host, err := cmdio.DefaultPrompt(ctx, "Host", "https://accounts.cloud.databricks.com") + if err != nil { + return nil, fmt.Errorf("host: %w", err) + } + accountID, err := cmdio.SimplePrompt(ctx, "Account ID") + if err != nil { + return nil, fmt.Errorf("account: %w", err) + } + username, err := cmdio.SimplePrompt(ctx, "Username") + if err != nil { + return nil, fmt.Errorf("username: %w", err) + } + password, err := cmdio.Secret(ctx, "Password") + if err != nil { + return nil, fmt.Errorf("password: %w", err) + } + return &databricks.Config{ + Host: host, + AccountID: accountID, + Username: username, + Password: password, + }, nil +} + +func enableOAuthForAccount(ctx context.Context, cfg *databricks.Config) error { + ac, err := databricks.NewAccountClient(cfg) + if err != nil { + return fmt.Errorf("failed to instantiate account client: %w", err) + } + // The enrollment is executed asynchronously, so the API returns HTTP 204 immediately + err = ac.OAuthEnrollment.Create(ctx, oauth2.CreateOAuthEnrollment{ + EnableAllPublishedApps: true, + }) + if err != nil { + return fmt.Errorf("failed to create oauth enrollment: %w", err) + } + enableSpinner := cmdio.Spinner(ctx) + // The actual enrollment take a few minutes + err = retries.Wait(ctx, 10*time.Minute, func() *retries.Err { + status, err := ac.OAuthEnrollment.Get(ctx) + if err != nil { + return retries.Halt(err) + } + if !status.IsEnabled { + msg := "Enabling OAuth..." + enableSpinner <- msg + return retries.Continues(msg) + } + enableSpinner <- "OAuth is enabled" + close(enableSpinner) + return nil + }) + if err != nil { + return fmt.Errorf("wait for enrollment: %w", err) + } + // enable Databricks CLI, so that `databricks auth login` works + _, err = ac.PublishedAppIntegration.Create(ctx, oauth2.CreatePublishedAppIntegration{ + AppId: "databricks-cli", + }) + if err != nil { + return fmt.Errorf("failed to enable databricks CLI: %w", err) + } + return nil +} + +func newEnable() *cobra.Command { + return &cobra.Command{ + Use: "enable", + Short: "Enable Databricks CLI, Tableau Desktop, and PowerBI for this account.", + Long: `Before you can do 'databricks auth login', you have to enable OAuth for this account. 
+ +This command prompts you for Account ID, username, and password and waits until OAuth is enabled.`, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + cfg, err := promptForBasicAccountConfig(ctx) + if err != nil { + return fmt.Errorf("account config: %w", err) + } + return enableOAuthForAccount(ctx, cfg) + }, + } +} + +func init() { + cmdOverrides = append(cmdOverrides, func(c *cobra.Command) { + c.AddCommand(newEnable()) + }) +} diff --git a/cmd/account/private-access/private-access.go b/cmd/account/private-access/private-access.go index ebb31dd03..9cbc09290 100755 --- a/cmd/account/private-access/private-access.go +++ b/cmd/account/private-access/private-access.go @@ -12,34 +12,54 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "private-access", - Short: `These APIs manage private access settings for this account.`, - Long: `These APIs manage private access settings for this account.`, - Annotations: map[string]string{ - "package": "provisioning", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "private-access", + Short: `These APIs manage private access settings for this account.`, + Long: `These APIs manage private access settings for this account.`, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.UpsertPrivateAccessSettingsRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.UpsertPrivateAccessSettingsRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.UpsertPrivateAccessSettingsRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: allowed_vpc_endpoint_ids - createCmd.Flags().Var(&createReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object.`) - createCmd.Flags().BoolVar(&createReq.PublicAccessEnabled, "public-access-enabled", createReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) + cmd.Flags().Var(&createReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object.`) + cmd.Flags().BoolVar(&createReq.PublicAccessEnabled, "public-access-enabled", createReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) -} - -var createCmd = &cobra.Command{ - Use: "create PRIVATE_ACCESS_SETTINGS_NAME REGION", - Short: `Create private access settings.`, - Long: `Create private access settings. 
+ cmd.Use = "create PRIVATE_ACCESS_SETTINGS_NAME REGION" + cmd.Short = `Create private access settings.` + cmd.Long = `Create private access settings. Creates a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must @@ -55,18 +75,20 @@ var createCmd = &cobra.Command{ PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -85,25 +107,45 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeletePrivateAccesRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeletePrivateAccesRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeletePrivateAccesRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete PRIVATE_ACCESS_SETTINGS_ID", - Short: `Delete a private access settings object.`, - Long: `Delete a private access settings object. + cmd.Use = "delete PRIVATE_ACCESS_SETTINGS_ID" + cmd.Short = `Delete a private access settings object.` + cmd.Long = `Delete a private access settings object. Deletes a private access settings object, which determines how your workspace is accessed over [AWS PrivateLink]. @@ -112,11 +154,12 @@ var deleteCmd = &cobra.Command{ PrivateLink]. 
[AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -144,25 +187,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetPrivateAccesRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetPrivateAccesRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetPrivateAccesRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get PRIVATE_ACCESS_SETTINGS_ID", - Short: `Get a private access settings object.`, - Long: `Get a private access settings object. + cmd.Use = "get PRIVATE_ACCESS_SETTINGS_ID" + cmd.Short = `Get a private access settings object.` + cmd.Long = `Get a private access settings object. Gets a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. @@ -171,11 +234,12 @@ var getCmd = &cobra.Command{ PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -203,30 +267,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all private access settings objects.`, - Long: `Get all private access settings objects. + cmd.Use = "list" + cmd.Short = `Get all private access settings objects.` + cmd.Long = `Get all private access settings objects. Gets a list of all private access settings objects for an account, specified - by ID.`, + by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.PrivateAccess.List(ctx) @@ -234,31 +316,51 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start replace command -var replaceReq provisioning.UpsertPrivateAccessSettingsRequest -var replaceJson flags.JsonFlag -func init() { - Cmd.AddCommand(replaceCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var replaceOverrides []func( + *cobra.Command, + *provisioning.UpsertPrivateAccessSettingsRequest, +) + +func newReplace() *cobra.Command { + cmd := &cobra.Command{} + + var replaceReq provisioning.UpsertPrivateAccessSettingsRequest + var replaceJson flags.JsonFlag + // TODO: short flags - replaceCmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: allowed_vpc_endpoint_ids - replaceCmd.Flags().Var(&replaceReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object.`) - replaceCmd.Flags().BoolVar(&replaceReq.PublicAccessEnabled, "public-access-enabled", replaceReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) + cmd.Flags().Var(&replaceReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object.`) + cmd.Flags().BoolVar(&replaceReq.PublicAccessEnabled, "public-access-enabled", replaceReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) -} - -var replaceCmd = &cobra.Command{ - Use: "replace PRIVATE_ACCESS_SETTINGS_NAME REGION PRIVATE_ACCESS_SETTINGS_ID", - Short: `Replace private access settings.`, - Long: `Replace private access settings. 
+ cmd.Use = "replace PRIVATE_ACCESS_SETTINGS_NAME REGION PRIVATE_ACCESS_SETTINGS_ID" + cmd.Short = `Replace private access settings.` + cmd.Long = `Replace private access settings. Updates an existing private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a @@ -280,15 +382,17 @@ var replaceCmd = &cobra.Command{ PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -307,10 +411,24 @@ var replaceCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range replaceOverrides { + fn(cmd, &replaceReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReplace()) + }) } // end service PrivateAccess diff --git a/cmd/account/published-app-integration/published-app-integration.go b/cmd/account/published-app-integration/published-app-integration.go index 7eb6d4c9e..b367ad71a 100755 --- a/cmd/account/published-app-integration/published-app-integration.go +++ b/cmd/account/published-app-integration/published-app-integration.go @@ -10,55 +10,77 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "published-app-integration", - Short: `These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`, - Long: `These APIs enable administrators to manage published oauth app integrations, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "published-app-integration", + Short: `These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`, + Long: `These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Cloud for Databricks in AWS cloud. **Note:** You can only add/use the OAuth published application integrations when OAuth enrollment status is enabled. 
For more details see :method:OAuthEnrollment/create`, - Annotations: map[string]string{ - "package": "oauth2", - }, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq oauth2.CreatePublishedAppIntegration -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *oauth2.CreatePublishedAppIntegration, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreatePublishedAppIntegration + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.AppId, "app-id", createReq.AppId, `app_id of the oauth published app integration.`) + cmd.Flags().StringVar(&createReq.AppId, "app-id", createReq.AppId, `app_id of the oauth published app integration.`) // TODO: complex arg: token_access_policy -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create Published OAuth App Integration.`, - Long: `Create Published OAuth App Integration. + cmd.Use = "create" + cmd.Short = `Create Published OAuth App Integration.` + cmd.Long = `Create Published OAuth App Integration. Create Published OAuth App Integration. You can retrieve the published oauth app integration via - :method:PublishedAppIntegration/get.`, + :method:PublishedAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -75,36 +97,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq oauth2.DeletePublishedAppIntegrationRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
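Sketch only; none of the following is generated code: because the override loop runs after `RunE` is assigned, a curated override can wrap the generated handler, for example to ask for confirmation before a delete. `cmdio.DefaultPrompt` is the helper used elsewhere in this change, and the package name is inferred from the directory.

    package published_app_integration

    import (
        "github.com/databricks/cli/libs/cmdio"
        "github.com/databricks/databricks-sdk-go/service/oauth2"
        "github.com/spf13/cobra"
    )

    func deleteOverride(deleteCmd *cobra.Command, deleteReq *oauth2.DeletePublishedAppIntegrationRequest) {
        runE := deleteCmd.RunE
        deleteCmd.RunE = func(cmd *cobra.Command, args []string) error {
            // Hypothetical confirmation step before calling the generated handler.
            answer, err := cmdio.DefaultPrompt(cmd.Context(), "Delete this app integration? (y/N)", "N")
            if err != nil {
                return err
            }
            if answer != "y" && answer != "Y" {
                return nil
            }
            return runE(cmd, args)
        }
    }

    func init() {
        deleteOverrides = append(deleteOverrides, deleteOverride)
    }
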
+var deleteOverrides []func( + *cobra.Command, + *oauth2.DeletePublishedAppIntegrationRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq oauth2.DeletePublishedAppIntegrationRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete INTEGRATION_ID", - Short: `Delete Published OAuth App Integration.`, - Long: `Delete Published OAuth App Integration. + cmd.Use = "delete INTEGRATION_ID" + cmd.Short = `Delete Published OAuth App Integration.` + cmd.Long = `Delete Published OAuth App Integration. Delete an existing Published OAuth App Integration. You can retrieve the - published oauth app integration via :method:PublishedAppIntegration/get.`, + published oauth app integration via :method:PublishedAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -115,35 +159,57 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq oauth2.GetPublishedAppIntegrationRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *oauth2.GetPublishedAppIntegrationRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq oauth2.GetPublishedAppIntegrationRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get INTEGRATION_ID", - Short: `Get OAuth Published App Integration.`, - Long: `Get OAuth Published App Integration. + cmd.Use = "get INTEGRATION_ID" + cmd.Short = `Get OAuth Published App Integration.` + cmd.Long = `Get OAuth Published App Integration. - Gets the Published OAuth App Integration for the given integration id.`, + Gets the Published OAuth App Integration for the given integration id.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -154,30 +220,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get published oauth app integrations.`, - Long: `Get published oauth app integrations. + cmd.Use = "list" + cmd.Short = `Get published oauth app integrations.` + cmd.Long = `Get published oauth app integrations. Get the list of published oauth app integrations for the specified Databricks - account`, + account` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.PublishedAppIntegration.ListAll(ctx) @@ -185,40 +269,62 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq oauth2.UpdatePublishedAppIntegration -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *oauth2.UpdatePublishedAppIntegration, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq oauth2.UpdatePublishedAppIntegration + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: token_access_policy -} - -var updateCmd = &cobra.Command{ - Use: "update INTEGRATION_ID", - Short: `Updates Published OAuth App Integration.`, - Long: `Updates Published OAuth App Integration. + cmd.Use = "update INTEGRATION_ID" + cmd.Short = `Updates Published OAuth App Integration.` + cmd.Long = `Updates Published OAuth App Integration. Updates an existing published OAuth App Integration. 
You can retrieve the - published oauth app integration via :method:PublishedAppIntegration/get.`, + published oauth app integration via :method:PublishedAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -235,10 +341,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service PublishedAppIntegration diff --git a/cmd/account/service-principal-secrets/service-principal-secrets.go b/cmd/account/service-principal-secrets/service-principal-secrets.go index 8c4c1fb95..a28f75faa 100755 --- a/cmd/account/service-principal-secrets/service-principal-secrets.go +++ b/cmd/account/service-principal-secrets/service-principal-secrets.go @@ -11,10 +11,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "service-principal-secrets", - Short: `These APIs enable administrators to manage service principal secrets.`, - Long: `These APIs enable administrators to manage service principal secrets. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principal-secrets", + Short: `These APIs enable administrators to manage service principal secrets.`, + Long: `These APIs enable administrators to manage service principal secrets. You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace @@ -27,34 +32,51 @@ var Cmd = &cobra.Command{ [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal`, - Annotations: map[string]string{ - "package": "oauth2", - }, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq oauth2.CreateServicePrincipalSecretRequest -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *oauth2.CreateServicePrincipalSecretRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreateServicePrincipalSecretRequest + // TODO: short flags -} - -var createCmd = &cobra.Command{ - Use: "create SERVICE_PRINCIPAL_ID", - Short: `Create service principal secret.`, - Long: `Create service principal secret. + cmd.Use = "create SERVICE_PRINCIPAL_ID" + cmd.Short = `Create service principal secret.` + cmd.Long = `Create service principal secret. - Create a secret for the given service principal.`, + Create a secret for the given service principal.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -68,35 +90,57 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq oauth2.DeleteServicePrincipalSecretRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *oauth2.DeleteServicePrincipalSecretRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq oauth2.DeleteServicePrincipalSecretRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete SERVICE_PRINCIPAL_ID SECRET_ID", - Short: `Delete service principal secret.`, - Long: `Delete service principal secret. + cmd.Use = "delete SERVICE_PRINCIPAL_ID SECRET_ID" + cmd.Short = `Delete service principal secret.` + cmd.Long = `Delete service principal secret. - Delete a secret from the given service principal.`, + Delete a secret from the given service principal.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -111,37 +155,59 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
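Every generated hunk above and below follows the same constructor shape: the request struct and its flag bindings move inside a newX() function, a package-level xOverrides slice lets manually curated code adjust both the command and the request before the constructor returns, and an init() hook appends the subcommand to the group's cmdOverrides. A minimal, self-contained sketch of that shape, using a hypothetical widgets package and GetWidgetRequest type that are not part of this diff:

package widgets

import "github.com/spf13/cobra"

// Hypothetical request type standing in for the SDK request structs used above.
type GetWidgetRequest struct {
	Id string
}

// Hooks that manually curated files in this package can append to from init().
var cmdOverrides []func(*cobra.Command)
var getOverrides []func(*cobra.Command, *GetWidgetRequest)

// New builds the command group; subcommands attach themselves via cmdOverrides.
func New() *cobra.Command {
	cmd := &cobra.Command{Use: "widgets"}
	for _, fn := range cmdOverrides {
		fn(cmd)
	}
	return cmd
}

func newGet() *cobra.Command {
	cmd := &cobra.Command{}

	// The request lives inside the constructor instead of being a package-level
	// var, so every call to newGet() starts from a clean request.
	var getReq GetWidgetRequest

	cmd.Use = "get ID"
	cmd.Args = cobra.ExactArgs(1)
	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		getReq.Id = args[0]
		// ... call the API with getReq ...
		return nil
	}

	// Curated overrides run last, so they see the fully configured command
	// and can also pre-populate fields on the request.
	for _, fn := range getOverrides {
		fn(cmd, &getReq)
	}
	return cmd
}

func init() {
	cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
		cmd.AddCommand(newGet())
	})
}

Because the override functions receive a pointer to the request, the curated overrides.go files further down in this diff can both restyle a command and seed request fields without touching the generated code.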
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start list command -var listReq oauth2.ListServicePrincipalSecretsRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *oauth2.ListServicePrincipalSecretsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq oauth2.ListServicePrincipalSecretsRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list SERVICE_PRINCIPAL_ID", - Short: `List service principal secrets.`, - Long: `List service principal secrets. + cmd.Use = "list SERVICE_PRINCIPAL_ID" + cmd.Short = `List service principal secrets.` + cmd.Long = `List service principal secrets. List all secrets associated with the given service principal. This operation only returns information about the secrets themselves and does not include the - secret values.`, + secret values.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -155,10 +221,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service ServicePrincipalSecrets diff --git a/cmd/account/service-principals/overrides.go b/cmd/account/service-principals/overrides.go index c335bead6..d94a4267c 100644 --- a/cmd/account/service-principals/overrides.go +++ b/cmd/account/service-principals/overrides.go @@ -1,9 +1,17 @@ package service_principals -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, _ *iam.ListAccountServicePrincipalsRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.ApplicationId}} {{.DisplayName}} {{range .Groups}}{{.Display}} {{end}} {{if .Active}}{{"ACTIVE"|green}}{{else}}DISABLED{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/service-principals/service-principals.go b/cmd/account/service-principals/service-principals.go index 6ed4a69ab..f5823c692 100755 --- a/cmd/account/service-principals/service-principals.go +++ b/cmd/account/service-principals/service-principals.go @@ -12,57 +12,79 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "service-principals", - Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, - Long: `Identities for use with jobs, automated tools, and systems such as scripts, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principals", + Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, + Long: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. Databricks recommends creating service principals to run production jobs or modify production data. If all processes that act on production data run with service principals, interactive users do not need any write, delete, or modify privileges in production. This eliminates the risk of a user overwriting production data by accident.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.ServicePrincipal -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.ServicePrincipal, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.ServicePrincipal + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) - createCmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) // TODO: array: roles -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a service principal.`, - Long: `Create a service principal. + cmd.Use = "create" + cmd.Short = `Create a service principal.` + cmd.Long = `Create a service principal. - Creates a new service principal in the Databricks account.`, + Creates a new service principal in the Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -79,31 +101,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteAccountServicePrincipalRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountServicePrincipalRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountServicePrincipalRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a service principal.`, - Long: `Delete a service principal. + cmd.Use = "delete ID" + cmd.Short = `Delete a service principal.` + cmd.Long = `Delete a service principal. - Delete a single service principal in the Databricks account.`, + Delete a single service principal in the Databricks account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -131,32 +174,53 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetAccountServicePrincipalRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetAccountServicePrincipalRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountServicePrincipalRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get service principal details.`, - Long: `Get service principal details. + cmd.Use = "get ID" + cmd.Short = `Get service principal details.` + cmd.Long = `Get service principal details. Gets the details for a single service principal define in the Databricks - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -184,48 +248,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListAccountServicePrincipalsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
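The manually curated overrides.go files change in step: instead of mutating a package-level listCmd from init(), they define a named function and append it to the matching override slice, as the service-principals overrides.go hunk above shows. A hypothetical extra override in the same style for the get command just above; the output template is illustrative and not something this diff adds:

package service_principals

import (
	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/databricks-sdk-go/service/iam"
	"github.com/spf13/cobra"
)

// Runs after newGet() has configured the command; it may adjust both the
// command and the request that will be sent.
func getOverride(getCmd *cobra.Command, _ *iam.GetAccountServicePrincipalRequest) {
	getCmd.Annotations["template"] = cmdio.Heredoc(`
	{{.Id|green}}	{{.DisplayName}}`)
}

func init() {
	getOverrides = append(getOverrides, getOverride)
}

Assigning into Annotations is safe here because newGet() initializes the map with make(map[string]string) before the overrides run.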
+var listOverrides []func( + *cobra.Command, + *iam.ListAccountServicePrincipalsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountServicePrincipalsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List service principals.`, - Long: `List service principals. + cmd.Use = "list" + cmd.Short = `List service principals.` + cmd.Long = `List service principals. - Gets the set of service principals associated with a Databricks account.`, + Gets the set of service principals associated with a Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -242,36 +328,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
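The generated commands all disable shell completions with cobra.NoFileCompletions and note that a manual implementation in override.go can supply them. One hypothetical shape such an override could take for the delete command above, with a static list standing in for a real API lookup:

package service_principals

import (
	"github.com/databricks/databricks-sdk-go/service/iam"
	"github.com/spf13/cobra"
)

func deleteCompletionOverride(deleteCmd *cobra.Command, _ *iam.DeleteAccountServicePrincipalRequest) {
	deleteCmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
		// Illustrative values only; a real override would list service
		// principals via the API and return their IDs.
		return []string{"1234567890", "2345678901"}, cobra.ShellCompDirectiveNoFileComp
	}
}

func init() {
	deleteOverrides = append(deleteOverrides, deleteCompletionOverride)
}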
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schemas -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update service principal details.`, - Long: `Update service principal details. + cmd.Use = "patch ID" + cmd.Short = `Update service principal details.` + cmd.Long = `Update service principal details. Partially updates the details of a single service principal in the Databricks - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -305,44 +413,65 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.ServicePrincipal -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.ServicePrincipal, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.ServicePrincipal + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) - updateCmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks service principal ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks service principal ID.`) // TODO: array: roles -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace service principal.`, - Long: `Replace service principal. + cmd.Use = "update ID" + cmd.Short = `Replace service principal.` + cmd.Long = `Replace service principal. Updates the details of a single service principal. - This action replaces the existing service principal with the same name.`, + This action replaces the existing service principal with the same name.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -377,10 +506,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountServicePrincipals diff --git a/cmd/account/settings/settings.go b/cmd/account/settings/settings.go index 8c507c3f6..4e98119dd 100755 --- a/cmd/account/settings/settings.go +++ b/cmd/account/settings/settings.go @@ -10,153 +10,201 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "settings", - Short: `TBD.`, - Long: `TBD`, - Annotations: map[string]string{ - "package": "settings", - }, +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) - // This service is being previewed; hide from help output. - Hidden: true, +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "settings", + Short: `The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources.`, + Long: `The Personal Compute enablement setting lets you control which users can use + the Personal Compute default policy to create compute resources. By default + all users in all workspaces have access (ON), but you can change the setting + to instead let individual workspaces configure access control (DELEGATE). + + There is only one instance of this setting per account. Since this setting has + a default value, this setting is present on all accounts even though it's + never set on a given account. Deletion reverts the value of the setting back + to the default value.`, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start delete-personal-compute-setting command -var deletePersonalComputeSettingReq settings.DeletePersonalComputeSettingRequest -var deletePersonalComputeSettingJson flags.JsonFlag -func init() { - Cmd.AddCommand(deletePersonalComputeSettingCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deletePersonalComputeSettingOverrides []func( + *cobra.Command, + *settings.DeletePersonalComputeSettingRequest, +) + +func newDeletePersonalComputeSetting() *cobra.Command { + cmd := &cobra.Command{} + + var deletePersonalComputeSettingReq settings.DeletePersonalComputeSettingRequest + // TODO: short flags - deletePersonalComputeSettingCmd.Flags().Var(&deletePersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) - deletePersonalComputeSettingCmd.Flags().StringVar(&deletePersonalComputeSettingReq.Etag, "etag", deletePersonalComputeSettingReq.Etag, `TBD.`) - -} - -var deletePersonalComputeSettingCmd = &cobra.Command{ - Use: "delete-personal-compute-setting", - Short: `Delete Personal Compute setting.`, - Long: `Delete Personal Compute setting. + cmd.Use = "delete-personal-compute-setting ETAG" + cmd.Short = `Delete Personal Compute setting.` + cmd.Long = `Delete Personal Compute setting. 
- TBD`, + Reverts back the Personal Compute setting value to default (ON)` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(0) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if cmd.Flags().Changed("json") { - err = deletePersonalComputeSettingJson.Unmarshal(&deletePersonalComputeSettingReq) - if err != nil { - return err - } - } else { - } + deletePersonalComputeSettingReq.Etag = args[0] response, err := a.Settings.DeletePersonalComputeSetting(ctx, deletePersonalComputeSettingReq) if err != nil { return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deletePersonalComputeSettingOverrides { + fn(cmd, &deletePersonalComputeSettingReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeletePersonalComputeSetting()) + }) } // start read-personal-compute-setting command -var readPersonalComputeSettingReq settings.ReadPersonalComputeSettingRequest -var readPersonalComputeSettingJson flags.JsonFlag -func init() { - Cmd.AddCommand(readPersonalComputeSettingCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var readPersonalComputeSettingOverrides []func( + *cobra.Command, + *settings.ReadPersonalComputeSettingRequest, +) + +func newReadPersonalComputeSetting() *cobra.Command { + cmd := &cobra.Command{} + + var readPersonalComputeSettingReq settings.ReadPersonalComputeSettingRequest + // TODO: short flags - readPersonalComputeSettingCmd.Flags().Var(&readPersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) - readPersonalComputeSettingCmd.Flags().StringVar(&readPersonalComputeSettingReq.Etag, "etag", readPersonalComputeSettingReq.Etag, `TBD.`) - -} - -var readPersonalComputeSettingCmd = &cobra.Command{ - Use: "read-personal-compute-setting", - Short: `Get Personal Compute setting.`, - Long: `Get Personal Compute setting. + cmd.Use = "read-personal-compute-setting ETAG" + cmd.Short = `Get Personal Compute setting.` + cmd.Long = `Get Personal Compute setting. 
- TBD`, + Gets the value of the Personal Compute setting.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(0) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if cmd.Flags().Changed("json") { - err = readPersonalComputeSettingJson.Unmarshal(&readPersonalComputeSettingReq) - if err != nil { - return err - } - } else { - } + readPersonalComputeSettingReq.Etag = args[0] response, err := a.Settings.ReadPersonalComputeSetting(ctx, readPersonalComputeSettingReq) if err != nil { return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range readPersonalComputeSettingOverrides { + fn(cmd, &readPersonalComputeSettingReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReadPersonalComputeSetting()) + }) } // start update-personal-compute-setting command -var updatePersonalComputeSettingReq settings.UpdatePersonalComputeSettingRequest -var updatePersonalComputeSettingJson flags.JsonFlag -func init() { - Cmd.AddCommand(updatePersonalComputeSettingCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePersonalComputeSettingOverrides []func( + *cobra.Command, + *settings.UpdatePersonalComputeSettingRequest, +) + +func newUpdatePersonalComputeSetting() *cobra.Command { + cmd := &cobra.Command{} + + var updatePersonalComputeSettingReq settings.UpdatePersonalComputeSettingRequest + var updatePersonalComputeSettingJson flags.JsonFlag + // TODO: short flags - updatePersonalComputeSettingCmd.Flags().Var(&updatePersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updatePersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updatePersonalComputeSettingCmd.Flags().BoolVar(&updatePersonalComputeSettingReq.AllowMissing, "allow-missing", updatePersonalComputeSettingReq.AllowMissing, `TBD.`) + cmd.Flags().BoolVar(&updatePersonalComputeSettingReq.AllowMissing, "allow-missing", updatePersonalComputeSettingReq.AllowMissing, `This should always be set to true for Settings RPCs.`) // TODO: complex arg: setting -} - -var updatePersonalComputeSettingCmd = &cobra.Command{ - Use: "update-personal-compute-setting", - Short: `Update Personal Compute setting.`, - Long: `Update Personal Compute setting. + cmd.Use = "update-personal-compute-setting" + cmd.Short = `Update Personal Compute setting.` + cmd.Long = `Update Personal Compute setting. 
- TBD`, + Updates the value of the Personal Compute setting.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -173,10 +221,24 @@ var updatePersonalComputeSettingCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePersonalComputeSettingOverrides { + fn(cmd, &updatePersonalComputeSettingReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePersonalComputeSetting()) + }) } // end service AccountSettings diff --git a/cmd/account/storage-credentials/storage-credentials.go b/cmd/account/storage-credentials/storage-credentials.go index 73e328dc5..451b71121 100755 --- a/cmd/account/storage-credentials/storage-credentials.go +++ b/cmd/account/storage-credentials/storage-credentials.go @@ -10,32 +10,52 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "storage-credentials", - Short: `These APIs manage storage credentials for a particular metastore.`, - Long: `These APIs manage storage credentials for a particular metastore.`, - Annotations: map[string]string{ - "package": "catalog", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "storage-credentials", + Short: `These APIs manage storage credentials for a particular metastore.`, + Long: `These APIs manage storage credentials for a particular metastore.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.AccountsCreateStorageCredential -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.AccountsCreateStorageCredential, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.AccountsCreateStorageCredential + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: credential_info -} - -var createCmd = &cobra.Command{ - Use: "create METASTORE_ID", - Short: `Create a storage credential.`, - Long: `Create a storage credential. + cmd.Use = "create METASTORE_ID" + cmd.Short = `Create a storage credential.` + cmd.Long = `Create a storage credential. Creates a new storage credential. The request object is specific to the cloud: @@ -43,15 +63,17 @@ var createCmd = &cobra.Command{ credentials * **GcpServiceAcountKey** for GCP credentials. The caller must be a metastore admin and have the - **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.`, + **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -68,36 +90,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteAccountStorageCredentialRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteAccountStorageCredentialRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteAccountStorageCredentialRequest + // TODO: short flags -} + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the Storage Credential is not empty.`) -var deleteCmd = &cobra.Command{ - Use: "delete METASTORE_ID NAME", - Short: `Delete a storage credential.`, - Long: `Delete a storage credential. + cmd.Use = "delete METASTORE_ID NAME" + cmd.Short = `Delete a storage credential.` + cmd.Long = `Delete a storage credential. Deletes a storage credential from the metastore. 
The caller must be an owner - of the storage credential.`, + of the storage credential.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -109,37 +155,59 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetAccountStorageCredentialRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetAccountStorageCredentialRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetAccountStorageCredentialRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get METASTORE_ID NAME", - Short: `Gets the named storage credential.`, - Long: `Gets the named storage credential. + cmd.Use = "get METASTORE_ID NAME" + cmd.Short = `Gets the named storage credential.` + cmd.Long = `Gets the named storage credential. Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have a level of privilege on - the storage credential.`, + the storage credential.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -151,36 +219,58 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListAccountStorageCredentialsRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *catalog.ListAccountStorageCredentialsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListAccountStorageCredentialsRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list METASTORE_ID", - Short: `Get all storage credentials assigned to a metastore.`, - Long: `Get all storage credentials assigned to a metastore. + cmd.Use = "list METASTORE_ID" + cmd.Short = `Get all storage credentials assigned to a metastore.` + cmd.Long = `Get all storage credentials assigned to a metastore. Gets a list of all storage credentials that have been assigned to given - metastore.`, + metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -191,41 +281,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.AccountsUpdateStorageCredential -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.AccountsUpdateStorageCredential, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.AccountsUpdateStorageCredential + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: credential_info -} - -var updateCmd = &cobra.Command{ - Use: "update METASTORE_ID NAME", - Short: `Updates a storage credential.`, - Long: `Updates a storage credential. + cmd.Use = "update METASTORE_ID NAME" + cmd.Short = `Updates a storage credential.` + cmd.Long = `Updates a storage credential. Updates a storage credential on the metastore. The caller must be the owner of the storage credential. 
If the caller is a metastore admin, only the __owner__ - credential can be changed.`, + credential can be changed.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -243,10 +355,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountStorageCredentials diff --git a/cmd/account/storage/overrides.go b/cmd/account/storage/overrides.go index 76ca6ee1e..6ebe4a7a4 100644 --- a/cmd/account/storage/overrides.go +++ b/cmd/account/storage/overrides.go @@ -1,9 +1,16 @@ package storage -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.StorageConfigurationId | green}} {{.StorageConfigurationName}} {{.RootBucketInfo.BucketName}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/storage/storage.go b/cmd/account/storage/storage.go index 54821d4d4..8eebbab1d 100755 --- a/cmd/account/storage/storage.go +++ b/cmd/account/storage/storage.go @@ -12,35 +12,55 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "storage", - Short: `These APIs manage storage configurations for this workspace.`, - Long: `These APIs manage storage configurations for this workspace. A root storage S3 +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "storage", + Short: `These APIs manage storage configurations for this workspace.`, + Long: `These APIs manage storage configurations for this workspace. A root storage S3 bucket in your account is required to store objects like cluster logs, notebook revisions, and job results. You can also use the root storage S3 bucket for storage of non-production DBFS data. A storage configuration encapsulates this bucket information, and its ID is used when creating a new workspace.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. 
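Each group constructor now also sets GroupID to the same value as its "package" annotation. In cobra, a subcommand's GroupID is only valid if the parent command has registered a matching cobra.Group, so whatever assembles these constructors has to declare the groups as well. A generic cobra sketch with a hypothetical parent command and placeholder group titles; the import paths are inferred from the file paths in this diff:

package main

import (
	service_principals "github.com/databricks/cli/cmd/account/service-principals"
	"github.com/databricks/cli/cmd/account/storage"
	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical parent; the real CLI wires this up elsewhere.
	parent := &cobra.Command{Use: "parent"}

	// Group IDs referenced by subcommands must be declared on the parent,
	// otherwise cobra refuses to run the command tree.
	parent.AddGroup(
		&cobra.Group{ID: "iam", Title: "Identity and Access Management"},
		&cobra.Group{ID: "provisioning", Title: "Provisioning"},
	)

	// service_principals.New() sets GroupID "iam"; storage.New() sets "provisioning".
	parent.AddCommand(service_principals.New(), storage.New())

	_ = parent.Execute()
}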
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateStorageConfigurationRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateStorageConfigurationRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateStorageConfigurationRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create new storage configuration.`, - Long: `Create new storage configuration. + cmd.Use = "create" + cmd.Short = `Create new storage configuration.` + cmd.Long = `Create new storage configuration. Creates new storage configuration for an account, specified by ID. Uploads a storage configuration object that represents the root AWS S3 bucket in your @@ -51,11 +71,12 @@ var createCmd = &cobra.Command{ For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html`, + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -73,32 +94,53 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteStorageRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteStorageRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteStorageRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete STORAGE_CONFIGURATION_ID", - Short: `Delete storage configuration.`, - Long: `Delete storage configuration. + cmd.Use = "delete STORAGE_CONFIGURATION_ID" + cmd.Short = `Delete storage configuration.` + cmd.Long = `Delete storage configuration. Deletes a Databricks storage configuration. 
You cannot delete a storage - configuration that is associated with any workspace.`, + configuration that is associated with any workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -126,31 +168,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetStorageRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetStorageRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetStorageRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get STORAGE_CONFIGURATION_ID", - Short: `Get storage configuration.`, - Long: `Get storage configuration. + cmd.Use = "get STORAGE_CONFIGURATION_ID" + cmd.Short = `Get storage configuration.` + cmd.Long = `Get storage configuration. - Gets a Databricks storage configuration for an account, both specified by ID.`, + Gets a Databricks storage configuration for an account, both specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -178,30 +241,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all storage configurations.`, - Long: `Get all storage configurations. + cmd.Use = "list" + cmd.Short = `Get all storage configurations.` + cmd.Long = `Get all storage configurations. 
Gets a list of all Databricks storage configurations for your account, - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Storage.List(ctx) @@ -209,10 +290,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service Storage diff --git a/cmd/account/users/overrides.go b/cmd/account/users/overrides.go index 45447a0ae..ff9773345 100644 --- a/cmd/account/users/overrides.go +++ b/cmd/account/users/overrides.go @@ -1,10 +1,18 @@ package users -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListAccountUsersRequest) { listReq.Attributes = "id,userName,groups,active" listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.UserName}} {{range .Groups}}{{.Display}} {{end}} {{if .Active}}{{"ACTIVE"|green}}{{else}}DISABLED{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/users/users.go b/cmd/account/users/users.go index 8a0b40b83..375dd5b5b 100755 --- a/cmd/account/users/users.go +++ b/cmd/account/users/users.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "users", - Short: `User identities recognized by Databricks and represented by email addresses.`, - Long: `User identities recognized by Databricks and represented by email addresses. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "users", + Short: `User identities recognized by Databricks and represented by email addresses.`, + Long: `User identities recognized by Databricks and represented by email addresses. Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks account. SCIM @@ -26,51 +31,68 @@ var Cmd = &cobra.Command{ provider and that user’s account will also be removed from Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from accessing sensitive data.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. 
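For the account users listing, the override also receives the request struct, so curated code can set request defaults (the Attributes filter) in addition to the render template. A sketch under the same caveat, with listRequest standing in for an SDK type such as iam.ListAccountUsersRequest:

package example

import "github.com/spf13/cobra"

// listRequest is a hypothetical stand-in for an SDK request struct.
type listRequest struct {
	Attributes string
}

// Overrides for "list" receive the request too, so curated code can change
// defaults before RunE sends the request.
var listOverrides []func(*cobra.Command, *listRequest)

func newList() *cobra.Command {
	cmd := &cobra.Command{Use: "list", Annotations: map[string]string{}}

	var listReq listRequest
	// Flags and RunE would bind to listReq here, as in the generated code.

	for _, fn := range listOverrides {
		fn(cmd, &listReq)
	}
	return cmd
}

// Curated override: request only a few attributes and render a compact listing.
func init() {
	listOverrides = append(listOverrides, func(cmd *cobra.Command, req *listRequest) {
		req.Attributes = "id,userName,active"
		cmd.Annotations["template"] = "{{range .}}{{.Id}} {{.UserName}}\n{{end}}"
	})
}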
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.User -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *iam.User, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.User + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: emails // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) // TODO: complex arg: name // TODO: array: roles - createCmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) + cmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new user.`, - Long: `Create a new user. + cmd.Use = "create" + cmd.Short = `Create a new user.` + cmd.Long = `Create a new user. Creates a new user in the Databricks account. This new user will also be added - to the Databricks account.`, + to the Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -87,32 +109,53 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteAccountUserRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountUserRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountUserRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a user.`, - Long: `Delete a user. + cmd.Use = "delete ID" + cmd.Short = `Delete a user.` + cmd.Long = `Delete a user. Deletes a user. Deleting a user from a Databricks account also removes objects - associated with the user.`, + associated with the user.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -140,31 +183,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetAccountUserRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetAccountUserRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountUserRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get user details.`, - Long: `Get user details. + cmd.Use = "get ID" + cmd.Short = `Get user details.` + cmd.Long = `Get user details. - Gets information for a specific user in Databricks account.`, + Gets information for a specific user in Databricks account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -192,48 +256,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListAccountUsersRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *iam.ListAccountUsersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountUsersRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List users.`, - Long: `List users. + cmd.Use = "list" + cmd.Short = `List users.` + cmd.Long = `List users. 
- Gets details for all the users associated with a Databricks account.`, + Gets details for all the users associated with a Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -250,36 +336,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schemas -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update user details.`, - Long: `Update user details. + cmd.Use = "patch ID" + cmd.Short = `Update user details.` + cmd.Long = `Update user details. Partially updates a user resource by applying the supplied operations on - specific user attributes.`, + specific user attributes.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -313,44 +421,65 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.User -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.User, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.User + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: emails // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks user ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks user ID.`) // TODO: complex arg: name // TODO: array: roles - updateCmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) + cmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace a user.`, - Long: `Replace a user. + cmd.Use = "update ID" + cmd.Short = `Replace a user.` + cmd.Long = `Replace a user. - Replaces a user's information with the data supplied in request.`, + Replaces a user's information with the data supplied in request.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -385,10 +514,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountUsers diff --git a/cmd/account/vpc-endpoints/vpc-endpoints.go b/cmd/account/vpc-endpoints/vpc-endpoints.go index 80ed3831e..5112b48d7 100755 --- a/cmd/account/vpc-endpoints/vpc-endpoints.go +++ b/cmd/account/vpc-endpoints/vpc-endpoints.go @@ -12,34 +12,54 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "vpc-endpoints", - Short: `These APIs manage VPC endpoint configurations for this account.`, - Long: `These APIs manage VPC endpoint configurations for this account.`, - Annotations: map[string]string{ - "package": "provisioning", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "vpc-endpoints", + Short: `These APIs manage VPC endpoint configurations for this account.`, + Long: `These APIs manage VPC endpoint configurations for this account.`, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateVpcEndpointRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateVpcEndpointRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateVpcEndpointRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.AwsVpcEndpointId, "aws-vpc-endpoint-id", createReq.AwsVpcEndpointId, `The ID of the VPC endpoint object in AWS.`) + cmd.Flags().StringVar(&createReq.AwsVpcEndpointId, "aws-vpc-endpoint-id", createReq.AwsVpcEndpointId, `The ID of the VPC endpoint object in AWS.`) // TODO: complex arg: gcp_vpc_endpoint_info - createCmd.Flags().StringVar(&createReq.Region, "region", createReq.Region, `The AWS region in which this VPC endpoint object exists.`) + cmd.Flags().StringVar(&createReq.Region, "region", createReq.Region, `The AWS region in which this VPC endpoint object exists.`) -} - -var createCmd = &cobra.Command{ - Use: "create VPC_ENDPOINT_NAME", - Short: `Create VPC endpoint configuration.`, - Long: `Create VPC endpoint configuration. + cmd.Use = "create VPC_ENDPOINT_NAME" + cmd.Short = `Create VPC endpoint configuration.` + cmd.Long = `Create VPC endpoint configuration. Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. 
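Subcommand registration now goes through the same hook mechanism: each generated file appends an AddCommand hook to the package's cmdOverrides slice from init(), and New() applies all hooks when the group command is built. A reduced sketch (names are illustrative, not from the repository):

package example

import "github.com/spf13/cobra"

// Hooks that attach subcommands (and anything else) to the group command.
var cmdOverrides []func(*cobra.Command)

// New builds the group command and lets every registered hook attach to it.
func New() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "widgets",
		Short: "Manage widgets.",
	}
	for _, fn := range cmdOverrides {
		fn(cmd)
	}
	return cmd
}

func newCreate() *cobra.Command { return &cobra.Command{Use: "create"} }
func newDelete() *cobra.Command { return &cobra.Command{Use: "delete"} }

// Each subcommand registers itself at init time, replacing the previous
// Cmd.AddCommand(createCmd) calls in package init().
func init() {
	cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
		cmd.AddCommand(newCreate())
		cmd.AddCommand(newDelete())
	})
}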
@@ -53,18 +73,20 @@ var createCmd = &cobra.Command{ [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html - [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html`, + [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -82,25 +104,45 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteVpcEndpointRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteVpcEndpointRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteVpcEndpointRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete VPC_ENDPOINT_ID", - Short: `Delete VPC endpoint configuration.`, - Long: `Delete VPC endpoint configuration. + cmd.Use = "delete VPC_ENDPOINT_ID" + cmd.Short = `Delete VPC endpoint configuration.` + cmd.Long = `Delete VPC endpoint configuration. Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. 
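The create commands keep accepting a positional argument, but switch to expecting none once --json carries the whole request body. A self-contained sketch of that argument check (illustrative names; the real commands bind the flag to a flags.JsonFlag and a request struct):

package example

import "github.com/spf13/cobra"

func newCreate() *cobra.Command {
	cmd := &cobra.Command{Use: "create NAME"}

	var jsonBody string
	cmd.Flags().StringVar(&jsonBody, "json", "", "request body as inline JSON or @path/to/file.json")

	// Exactly one positional argument normally; none when --json is set.
	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := cobra.ExactArgs(1)
		if cmd.Flags().Changed("json") {
			check = cobra.ExactArgs(0)
		}
		return check(cmd, args)
	}

	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		// A real command would fill the request from args[0] or jsonBody here.
		return nil
	}
	return cmd
}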
@@ -110,11 +152,12 @@ var deleteCmd = &cobra.Command{ [AWS PrivateLink]: https://aws.amazon.com/privatelink [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -142,35 +185,56 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetVpcEndpointRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetVpcEndpointRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetVpcEndpointRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get VPC_ENDPOINT_ID", - Short: `Get a VPC endpoint configuration.`, - Long: `Get a VPC endpoint configuration. + cmd.Use = "get VPC_ENDPOINT_ID" + cmd.Short = `Get a VPC endpoint configuration.` + cmd.Long = `Get a VPC endpoint configuration. Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink - [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html`, + [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -198,34 +262,52 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all VPC endpoint configurations.`, - Long: `Get all VPC endpoint configurations. + cmd.Use = "list" + cmd.Short = `Get all VPC endpoint configurations.` + cmd.Long = `Get all VPC endpoint configurations. Gets a list of all VPC endpoints for an account, specified by ID. Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.VpcEndpoints.List(ctx) @@ -233,10 +315,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service VpcEndpoints diff --git a/cmd/account/workspace-assignment/workspace-assignment.go b/cmd/account/workspace-assignment/workspace-assignment.go index dab357122..9e8c14045 100755 --- a/cmd/account/workspace-assignment/workspace-assignment.go +++ b/cmd/account/workspace-assignment/workspace-assignment.go @@ -12,40 +12,62 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspace-assignment", - Short: `The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.`, - Long: `The Workspace Permission Assignment API allows you to manage workspace +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspace-assignment", + Short: `The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.`, + Long: `The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start delete command -var deleteReq iam.DeleteWorkspaceAssignmentRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *iam.DeleteWorkspaceAssignmentRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteWorkspaceAssignmentRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete WORKSPACE_ID PRINCIPAL_ID", - Short: `Delete permissions assignment.`, - Long: `Delete permissions assignment. + cmd.Use = "delete WORKSPACE_ID PRINCIPAL_ID" + cmd.Short = `Delete permissions assignment.` + cmd.Long = `Delete permissions assignment. Deletes the workspace permissions assignment in a given account and workspace - for the specified principal.`, + for the specified principal.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -63,35 +85,57 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetWorkspaceAssignmentRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetWorkspaceAssignmentRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetWorkspaceAssignmentRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get WORKSPACE_ID", - Short: `List workspace permissions.`, - Long: `List workspace permissions. + cmd.Use = "get WORKSPACE_ID" + cmd.Short = `List workspace permissions.` + cmd.Long = `List workspace permissions. - Get an array of workspace permissions for the specified account and workspace.`, + Get an array of workspace permissions for the specified account and workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -105,36 +149,58 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListWorkspaceAssignmentRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *iam.ListWorkspaceAssignmentRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListWorkspaceAssignmentRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list WORKSPACE_ID", - Short: `Get permission assignments.`, - Long: `Get permission assignments. + cmd.Use = "list WORKSPACE_ID" + cmd.Short = `Get permission assignments.` + cmd.Long = `Get permission assignments. Get the permission assignments for the specified Databricks account and - Databricks workspace.`, + Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -148,34 +214,55 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq iam.UpdateWorkspaceAssignments -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.UpdateWorkspaceAssignments, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.UpdateWorkspaceAssignments + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Create or update permissions assignment.`, - Long: `Create or update permissions assignment. + cmd.Use = "update" + cmd.Short = `Create or update permissions assignment.` + cmd.Long = `Create or update permissions assignment. 
Creates or updates the workspace permissions assignment in a given account and - workspace for the specified principal.`, + workspace for the specified principal.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -193,10 +280,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service WorkspaceAssignment diff --git a/cmd/account/workspaces/overrides.go b/cmd/account/workspaces/overrides.go index 458950242..283675c61 100644 --- a/cmd/account/workspaces/overrides.go +++ b/cmd/account/workspaces/overrides.go @@ -1,9 +1,16 @@ package workspaces -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{green "%d" .WorkspaceId}} {{.WorkspaceName}} {{.WorkspaceStatus}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/workspaces/workspaces.go b/cmd/account/workspaces/workspaces.go index 833d3cc00..60142a8a0 100755 --- a/cmd/account/workspaces/workspaces.go +++ b/cmd/account/workspaces/workspaces.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspaces", - Short: `These APIs manage workspaces for this account.`, - Long: `These APIs manage workspaces for this account. A Databricks workspace is an +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspaces", + Short: `These APIs manage workspaces for this account.`, + Long: `These APIs manage workspaces for this account. A Databricks workspace is an environment for accessing all of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into folders, and provides access to data and computational resources such as clusters and jobs. @@ -24,45 +29,62 @@ var Cmd = &cobra.Command{ These endpoints are available if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateWorkspaceRequest -var createJson flags.JsonFlag -var createSkipWait bool -var createTimeout time.Duration +// Slice with functions to override default command behavior. 
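The group commands now carry GroupID values such as iam and provisioning. Cobra expects every GroupID referenced by a subcommand to be registered on the parent via AddGroup, so a consumer of these New() constructors wires them roughly like the sketch below (group titles and the command stand-ins are made up):

package main

import "github.com/spf13/cobra"

func main() {
	root := &cobra.Command{Use: "cli"}

	// Register the group IDs referenced by the generated commands.
	// Titles here are assumptions for illustration.
	root.AddGroup(
		&cobra.Group{ID: "iam", Title: "Identity and Access Management"},
		&cobra.Group{ID: "provisioning", Title: "Provisioning"},
	)

	// Hypothetical stand-ins for the generated New() constructors.
	users := &cobra.Command{Use: "users", GroupID: "iam"}
	storage := &cobra.Command{Use: "storage", GroupID: "provisioning"}
	root.AddCommand(users, storage)

	_ = root.Execute()
}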
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateWorkspaceRequest, +) -func init() { - Cmd.AddCommand(createCmd) +func newCreate() *cobra.Command { + cmd := &cobra.Command{} - createCmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) - createCmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var createReq provisioning.CreateWorkspaceRequest + var createJson flags.JsonFlag + + var createSkipWait bool + var createTimeout time.Duration + + cmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.AwsRegion, "aws-region", createReq.AwsRegion, `The AWS region of the workspace's data plane.`) - createCmd.Flags().StringVar(&createReq.Cloud, "cloud", createReq.Cloud, `The cloud provider which the workspace uses.`) + cmd.Flags().StringVar(&createReq.AwsRegion, "aws-region", createReq.AwsRegion, `The AWS region of the workspace's data plane.`) + cmd.Flags().StringVar(&createReq.Cloud, "cloud", createReq.Cloud, `The cloud provider which the workspace uses.`) // TODO: complex arg: cloud_resource_container - createCmd.Flags().StringVar(&createReq.CredentialsId, "credentials-id", createReq.CredentialsId, `ID of the workspace's credential configuration object.`) - createCmd.Flags().StringVar(&createReq.DeploymentName, "deployment-name", createReq.DeploymentName, `The deployment name defines part of the subdomain for the workspace.`) - createCmd.Flags().StringVar(&createReq.Location, "location", createReq.Location, `The Google Cloud region of the workspace data plane in your Google account.`) - createCmd.Flags().StringVar(&createReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", createReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) - createCmd.Flags().StringVar(&createReq.NetworkId, "network-id", createReq.NetworkId, ``) - createCmd.Flags().Var(&createReq.PricingTier, "pricing-tier", `The pricing tier of the workspace.`) - createCmd.Flags().StringVar(&createReq.PrivateAccessSettingsId, "private-access-settings-id", createReq.PrivateAccessSettingsId, `ID of the workspace's private access settings object.`) - createCmd.Flags().StringVar(&createReq.StorageConfigurationId, "storage-configuration-id", createReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) - createCmd.Flags().StringVar(&createReq.StorageCustomerManagedKeyId, "storage-customer-managed-key-id", createReq.StorageCustomerManagedKeyId, `The ID of the workspace's storage encryption key configuration object.`) + cmd.Flags().StringVar(&createReq.CredentialsId, "credentials-id", createReq.CredentialsId, `ID of the workspace's credential configuration object.`) + // TODO: map via StringToStringVar: custom_tags + cmd.Flags().StringVar(&createReq.DeploymentName, "deployment-name", createReq.DeploymentName, `The deployment name defines part of 
the subdomain for the workspace.`) + // TODO: complex arg: gcp_managed_network_config + // TODO: complex arg: gke_config + cmd.Flags().StringVar(&createReq.Location, "location", createReq.Location, `The Google Cloud region of the workspace data plane in your Google account.`) + cmd.Flags().StringVar(&createReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", createReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) + cmd.Flags().StringVar(&createReq.NetworkId, "network-id", createReq.NetworkId, ``) + cmd.Flags().Var(&createReq.PricingTier, "pricing-tier", `The pricing tier of the workspace.`) + cmd.Flags().StringVar(&createReq.PrivateAccessSettingsId, "private-access-settings-id", createReq.PrivateAccessSettingsId, `ID of the workspace's private access settings object.`) + cmd.Flags().StringVar(&createReq.StorageConfigurationId, "storage-configuration-id", createReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) + cmd.Flags().StringVar(&createReq.StorageCustomerManagedKeyId, "storage-customer-managed-key-id", createReq.StorageCustomerManagedKeyId, `The ID of the workspace's storage encryption key configuration object.`) -} - -var createCmd = &cobra.Command{ - Use: "create WORKSPACE_NAME", - Short: `Create a new workspace.`, - Long: `Create a new workspace. + cmd.Use = "create WORKSPACE_NAME" + cmd.Short = `Create a new workspace.` + cmd.Long = `Create a new workspace. Creates a new workspace. @@ -72,18 +94,20 @@ var createCmd = &cobra.Command{ workspace status is typically PROVISIONING. Use the workspace ID (workspace_id) field in the response to identify the new workspace and make repeated GET requests with the workspace ID and check its status. The - workspace becomes available when the status changes to RUNNING.`, + workspace becomes available when the status changes to RUNNING.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -113,25 +137,45 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteWorkspaceRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteWorkspaceRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteWorkspaceRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete WORKSPACE_ID", - Short: `Delete a workspace.`, - Long: `Delete a workspace. + cmd.Use = "delete WORKSPACE_ID" + cmd.Short = `Delete a workspace.` + cmd.Long = `Delete a workspace. Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. However, it might take a few minutes for all workspaces @@ -140,11 +184,12 @@ var deleteCmd = &cobra.Command{ This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -175,25 +220,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetWorkspaceRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetWorkspaceRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetWorkspaceRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get WORKSPACE_ID", - Short: `Get a workspace.`, - Long: `Get a workspace. + cmd.Use = "get WORKSPACE_ID" + cmd.Short = `Get a workspace.` + cmd.Long = `Get a workspace. Gets information including status for a Databricks workspace, specified by ID. In the response, the workspace_status field indicates the current status. @@ -208,11 +273,12 @@ var getCmd = &cobra.Command{ platform or on a select custom plan that allows multiple workspaces per account. - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html`, + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -243,33 +309,51 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all workspaces.`, - Long: `Get all workspaces. + cmd.Use = "list" + cmd.Short = `Get all workspaces.` + cmd.Long = `Get all workspaces. Gets a list of all workspaces associated with an account, specified by ID. This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Workspaces.List(ctx) @@ -277,38 +361,60 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq provisioning.UpdateWorkspaceRequest -var updateSkipWait bool -var updateTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *provisioning.UpdateWorkspaceRequest, +) -func init() { - Cmd.AddCommand(updateCmd) +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} - updateCmd.Flags().BoolVar(&updateSkipWait, "no-wait", updateSkipWait, `do not wait to reach RUNNING state`) - updateCmd.Flags().DurationVar(&updateTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var updateReq provisioning.UpdateWorkspaceRequest + var updateJson flags.JsonFlag + + var updateSkipWait bool + var updateTimeout time.Duration + + cmd.Flags().BoolVar(&updateSkipWait, "no-wait", updateSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&updateTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.AwsRegion, "aws-region", updateReq.AwsRegion, `The AWS region of the workspace's data plane (for example, us-west-2).`) - updateCmd.Flags().StringVar(&updateReq.CredentialsId, "credentials-id", updateReq.CredentialsId, `ID of the workspace's credential configuration object.`) - updateCmd.Flags().StringVar(&updateReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", updateReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) - updateCmd.Flags().StringVar(&updateReq.NetworkId, "network-id", updateReq.NetworkId, `The ID of the workspace's network configuration object.`) - updateCmd.Flags().StringVar(&updateReq.StorageConfigurationId, "storage-configuration-id", updateReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) - updateCmd.Flags().StringVar(&updateReq.StorageCustomerManagedKeyId, "storage-customer-managed-key-id", updateReq.StorageCustomerManagedKeyId, `The ID of the key configuration object for workspace storage.`) + cmd.Flags().StringVar(&updateReq.AwsRegion, "aws-region", updateReq.AwsRegion, `The AWS region of the workspace's data plane (for example, us-west-2).`) + cmd.Flags().StringVar(&updateReq.CredentialsId, "credentials-id", updateReq.CredentialsId, `ID of the workspace's credential configuration object.`) + // TODO: map via StringToStringVar: custom_tags + cmd.Flags().StringVar(&updateReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", updateReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) + cmd.Flags().StringVar(&updateReq.NetworkId, "network-id", updateReq.NetworkId, `The ID of the workspace's network configuration object.`) + cmd.Flags().StringVar(&updateReq.StorageConfigurationId, "storage-configuration-id", updateReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) + cmd.Flags().StringVar(&updateReq.StorageCustomerManagedKeyId, "storage-customer-managed-key-id", updateReq.StorageCustomerManagedKeyId, `The ID of the key configuration object for workspace storage.`) -} - -var updateCmd = &cobra.Command{ - Use: "update WORKSPACE_ID", - Short: `Update workspace configuration.`, - Long: `Update workspace configuration. + cmd.Use = "update WORKSPACE_ID" + cmd.Short = `Update workspace configuration.` + cmd.Long = `Update workspace configuration. Updates a workspace configuration for either a running workspace or a failed workspace. 
The elements that can be updated varies between these two use @@ -335,7 +441,8 @@ var updateCmd = &cobra.Command{ support. You can add or update the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of connectivity. You cannot remove (downgrade) any existing front-end or back-end - PrivateLink support on a workspace. + PrivateLink support on a workspace. - Custom tags. Given you provide an empty + custom tags, the update would not be applied. After calling the PATCH operation to update the workspace configuration, make repeated GET requests with the workspace ID and check the workspace @@ -373,7 +480,8 @@ var updateCmd = &cobra.Command{ PrivateLink support. You can add or update the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of connectivity. You cannot remove (downgrade) any existing front-end or back-end - PrivateLink support on a workspace. + PrivateLink support on a workspace. - Custom tags. Given you provide an empty + custom tags, the update would not be applied. **Important**: To update a running workspace, your workspace must have no running compute resources that run in your workspace's VPC in the Classic data @@ -420,14 +528,21 @@ var updateCmd = &cobra.Command{ account. [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html`, + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } if len(args) == 0 { promptSpinner := cmdio.Spinner(ctx) promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Workspaces drop-down." @@ -467,10 +582,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Workspaces diff --git a/cmd/api/api.go b/cmd/api/api.go index 563efa732..11a5e3e36 100644 --- a/cmd/api/api.go +++ b/cmd/api/api.go @@ -5,7 +5,6 @@ import ( "net/http" "strings" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" "github.com/databricks/databricks-sdk-go/client" @@ -13,9 +12,22 @@ import ( "github.com/spf13/cobra" ) -var apiCmd = &cobra.Command{ - Use: "api", - Short: "Perform Databricks API call", +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "api", + Short: "Perform Databricks API call", + } + + cmd.AddCommand( + makeCommand(http.MethodGet), + makeCommand(http.MethodHead), + makeCommand(http.MethodPost), + makeCommand(http.MethodPut), + makeCommand(http.MethodPatch), + makeCommand(http.MethodDelete), + ) + + return cmd } func makeCommand(method string) *cobra.Command { @@ -48,7 +60,8 @@ func makeCommand(method string) *cobra.Command { } var response any - err = api.Do(cmd.Context(), method, path, request, &response) + headers := map[string]string{"Content-Type": "application/json"} + err = api.Do(cmd.Context(), method, path, headers, request, &response) if err != nil { return err } @@ -59,15 +72,3 @@ func makeCommand(method string) *cobra.Command { command.Flags().Var(&payload, "json", `either inline JSON string or @path/to/file.json with request body`) return command } - -func init() { - apiCmd.AddCommand( - makeCommand(http.MethodGet), - makeCommand(http.MethodHead), - makeCommand(http.MethodPost), - makeCommand(http.MethodPut), - makeCommand(http.MethodPatch), - makeCommand(http.MethodDelete), - ) - root.RootCmd.AddCommand(apiCmd) -} diff --git a/cmd/auth/auth.go b/cmd/auth/auth.go index b7e8d2d78..e0c7c7c5b 100644 --- a/cmd/auth/auth.go +++ b/cmd/auth/auth.go @@ -3,18 +3,27 @@ package auth import ( "context" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/auth" "github.com/databricks/cli/libs/cmdio" "github.com/spf13/cobra" ) -var authCmd = &cobra.Command{ - Use: "auth", - Short: "Authentication related commands", -} +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "auth", + Short: "Authentication related commands", + } -var persistentAuth auth.PersistentAuth + var perisistentAuth auth.PersistentAuth + cmd.PersistentFlags().StringVar(&perisistentAuth.Host, "host", perisistentAuth.Host, "Databricks Host") + cmd.PersistentFlags().StringVar(&perisistentAuth.AccountID, "account-id", perisistentAuth.AccountID, "Databricks Account ID") + + cmd.AddCommand(newEnvCommand()) + cmd.AddCommand(newLoginCommand(&perisistentAuth)) + cmd.AddCommand(newProfilesCommand()) + cmd.AddCommand(newTokenCommand(&perisistentAuth)) + return cmd +} func promptForHost(ctx context.Context) (string, error) { prompt := cmdio.Prompt(ctx) @@ -41,9 +50,3 @@ func promptForAccountID(ctx context.Context) (string, error) { } return accountId, nil } - -func init() { - root.RootCmd.AddCommand(authCmd) - authCmd.PersistentFlags().StringVar(&persistentAuth.Host, "host", persistentAuth.Host, "Databricks Host") - authCmd.PersistentFlags().StringVar(&persistentAuth.AccountID, "account-id", persistentAuth.AccountID, "Databricks Account ID") -} diff --git a/cmd/auth/env.go b/cmd/auth/env.go index e288c576c..241d5f880 100644 --- a/cmd/auth/env.go +++ b/cmd/auth/env.go @@ -9,6 +9,7 @@ import 
( "net/url" "strings" + "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go/config" "github.com/spf13/cobra" "gopkg.in/ini.v1" @@ -28,7 +29,7 @@ func canonicalHost(host string) (string, error) { var ErrNoMatchingProfiles = errors.New("no matching profiles found") -func resolveSection(cfg *config.Config, iniFile *ini.File) (*ini.Section, error) { +func resolveSection(cfg *config.Config, iniFile *config.File) (*ini.Section, error) { var candidates []*ini.Section configuredHost, err := canonicalHost(cfg.Host) if err != nil { @@ -68,7 +69,7 @@ func resolveSection(cfg *config.Config, iniFile *ini.File) (*ini.Section, error) } func loadFromDatabricksCfg(cfg *config.Config) error { - iniFile, err := getDatabricksCfg() + iniFile, err := databrickscfg.Get() if errors.Is(err, fs.ErrNotExist) { // it's fine not to have ~/.databrickscfg return nil @@ -89,10 +90,18 @@ func loadFromDatabricksCfg(cfg *config.Config) error { return nil } -var envCmd = &cobra.Command{ - Use: "env", - Short: "Get env", - RunE: func(cmd *cobra.Command, args []string) error { +func newEnvCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "env", + Short: "Get env", + } + + var host string + var profile string + cmd.Flags().StringVar(&host, "host", host, "Hostname to get auth env for") + cmd.Flags().StringVar(&profile, "profile", profile, "Profile to get auth env for") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { cfg := &config.Config{ Host: host, Profile: profile, @@ -130,14 +139,7 @@ var envCmd = &cobra.Command{ } cmd.OutOrStdout().Write(raw) return nil - }, -} + } -var host string -var profile string - -func init() { - authCmd.AddCommand(envCmd) - envCmd.Flags().StringVar(&host, "host", host, "Hostname to get auth env for") - envCmd.Flags().StringVar(&profile, "profile", profile, "Profile to get auth env for") + return cmd } diff --git a/cmd/auth/login.go b/cmd/auth/login.go index 37d44c084..cf1d5c301 100644 --- a/cmd/auth/login.go +++ b/cmd/auth/login.go @@ -14,10 +14,7 @@ import ( "github.com/spf13/cobra" ) -var loginTimeout time.Duration -var configureCluster bool - -func configureHost(ctx context.Context, args []string, argIndex int) error { +func configureHost(ctx context.Context, persistentAuth *auth.PersistentAuth, args []string, argIndex int) error { if len(args) > argIndex { persistentAuth.Host = args[argIndex] return nil @@ -31,13 +28,51 @@ func configureHost(ctx context.Context, args []string, argIndex int) error { return nil } -var loginCmd = &cobra.Command{ - Use: "login [HOST]", - Short: "Authenticate this machine", - RunE: func(cmd *cobra.Command, args []string) error { +func newLoginCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { + cmd := &cobra.Command{ + Use: "login [HOST]", + Short: "Authenticate this machine", + } + + var loginTimeout time.Duration + var configureCluster bool + cmd.Flags().DurationVar(&loginTimeout, "timeout", auth.DefaultTimeout, + "Timeout for completing login challenge in the browser") + cmd.Flags().BoolVar(&configureCluster, "configure-cluster", false, + "Prompts to configure cluster") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() + + var profileName string + profileFlag := cmd.Flag("profile") + if profileFlag != nil && profileFlag.Value.String() != "" { + profileName = profileFlag.Value.String() + } else { + prompt := cmdio.Prompt(ctx) + prompt.Label = "Databricks Profile Name" + prompt.Default = persistentAuth.ProfileName() + prompt.AllowEdit = true + profile, err := 
prompt.Run() + if err != nil { + return err + } + profileName = profile + } + + // If the chosen profile has a hostname and the user hasn't specified a host, infer the host from the profile. + _, profiles, err := databrickscfg.LoadProfiles(func(p databrickscfg.Profile) bool { + return p.Name == profileName + }) + if err != nil { + return err + } if persistentAuth.Host == "" { - configureHost(ctx, args, 0) + if len(profiles) > 0 && profiles[0].Host != "" { + persistentAuth.Host = profiles[0].Host + } else { + configureHost(ctx, persistentAuth, args, 0) + } } defer persistentAuth.Close() @@ -59,22 +94,7 @@ var loginCmd = &cobra.Command{ ctx, cancel := context.WithTimeout(ctx, loginTimeout) defer cancel() - var profileName string - profileFlag := cmd.Flag("profile") - if profileFlag != nil && profileFlag.Value.String() != "" { - profileName = profileFlag.Value.String() - } else { - prompt := cmdio.Prompt(ctx) - prompt.Label = "Databricks Profile Name" - prompt.Default = persistentAuth.ProfileName() - prompt.AllowEdit = true - profile, err := prompt.Run() - if err != nil { - return err - } - profileName = profile - } - err := persistentAuth.Challenge(ctx) + err = persistentAuth.Challenge(ctx) if err != nil { return err } @@ -108,14 +128,7 @@ var loginCmd = &cobra.Command{ cmdio.LogString(ctx, fmt.Sprintf("Profile %s was successfully saved", profileName)) return nil - }, -} + } -func init() { - authCmd.AddCommand(loginCmd) - loginCmd.Flags().DurationVar(&loginTimeout, "timeout", auth.DefaultTimeout, - "Timeout for completing login challenge in the browser") - - loginCmd.Flags().BoolVar(&configureCluster, "configure-cluster", false, - "Prompts to configure cluster") + return cmd } diff --git a/cmd/auth/profiles.go b/cmd/auth/profiles.go index d3b167b77..97d8eeabc 100644 --- a/cmd/auth/profiles.go +++ b/cmd/auth/profiles.go @@ -5,32 +5,16 @@ import ( "fmt" "net/http" "os" - "path/filepath" - "strings" "sync" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/config" "github.com/spf13/cobra" "gopkg.in/ini.v1" ) -func getDatabricksCfg() (*ini.File, error) { - configFile := os.Getenv("DATABRICKS_CONFIG_FILE") - if configFile == "" { - configFile = "~/.databrickscfg" - } - if strings.HasPrefix(configFile, "~") { - homedir, err := os.UserHomeDir() - if err != nil { - return nil, fmt.Errorf("cannot find homedir: %w", err) - } - configFile = filepath.Join(homedir, configFile[1:]) - } - return ini.Load(configFile) -} - type profileMetadata struct { Name string `json:"name"` Host string `json:"host,omitempty"` @@ -44,7 +28,7 @@ func (c *profileMetadata) IsEmpty() bool { return c.Host == "" && c.AccountID == "" } -func (c *profileMetadata) Load(ctx context.Context) { +func (c *profileMetadata) Load(ctx context.Context, skipValidate bool) { // TODO: disable config loaders other than configfile cfg := &config.Config{Profile: c.Name} _ = cfg.EnsureResolved() @@ -94,21 +78,29 @@ func (c *profileMetadata) Load(ctx context.Context) { c.Host = cfg.Host } -var profilesCmd = &cobra.Command{ - Use: "profiles", - Short: "Lists profiles from ~/.databrickscfg", - Annotations: map[string]string{ - "template": cmdio.Heredoc(` - {{header "Name"}} {{header "Host"}} {{header "Valid"}} - {{range .Profiles}}{{.Name | green}} {{.Host|cyan}} {{bool .Valid}} - {{end}}`), - }, - RunE: func(cmd *cobra.Command, args []string) error { +func newProfilesCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: 
"profiles", + Short: "Lists profiles from ~/.databrickscfg", + Annotations: map[string]string{ + "template": cmdio.Heredoc(` + {{header "Name"}} {{header "Host"}} {{header "Valid"}} + {{range .Profiles}}{{.Name | green}} {{.Host|cyan}} {{bool .Valid}} + {{end}}`), + }, + } + + var skipValidate bool + cmd.Flags().BoolVar(&skipValidate, "skip-validate", false, "Whether to skip validating the profiles") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { var profiles []*profileMetadata - iniFile, err := getDatabricksCfg() + iniFile, err := databrickscfg.Get() if os.IsNotExist(err) { // return empty list for non-configured machines - iniFile = ini.Empty() + iniFile = &config.File{ + File: &ini.File{}, + } } else if err != nil { return fmt.Errorf("cannot parse config file: %w", err) } @@ -126,7 +118,7 @@ var profilesCmd = &cobra.Command{ wg.Add(1) go func() { // load more information about profile - profile.Load(cmd.Context()) + profile.Load(cmd.Context(), skipValidate) wg.Done() }() profiles = append(profiles, profile) @@ -135,12 +127,7 @@ var profilesCmd = &cobra.Command{ return cmdio.Render(cmd.Context(), struct { Profiles []*profileMetadata `json:"profiles"` }{profiles}) - }, -} + } -var skipValidate bool - -func init() { - authCmd.AddCommand(profilesCmd) - profilesCmd.Flags().BoolVar(&skipValidate, "skip-validate", false, "Whether to skip validating the profiles") + return cmd } diff --git a/cmd/auth/token.go b/cmd/auth/token.go index 1b8d8b131..242a3dabe 100644 --- a/cmd/auth/token.go +++ b/cmd/auth/token.go @@ -9,15 +9,20 @@ import ( "github.com/spf13/cobra" ) -var tokenTimeout time.Duration +func newTokenCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { + cmd := &cobra.Command{ + Use: "token [HOST]", + Short: "Get authentication token", + } -var tokenCmd = &cobra.Command{ - Use: "token [HOST]", - Short: "Get authentication token", - RunE: func(cmd *cobra.Command, args []string) error { + var tokenTimeout time.Duration + cmd.Flags().DurationVar(&tokenTimeout, "timeout", auth.DefaultTimeout, + "Timeout for acquiring a token.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() if persistentAuth.Host == "" { - configureHost(ctx, args, 0) + configureHost(ctx, persistentAuth, args, 0) } defer persistentAuth.Close() @@ -33,11 +38,7 @@ var tokenCmd = &cobra.Command{ } cmd.OutOrStdout().Write(raw) return nil - }, -} + } -func init() { - authCmd.AddCommand(tokenCmd) - tokenCmd.Flags().DurationVar(&tokenTimeout, "timeout", auth.DefaultTimeout, - "Timeout for acquiring a token.") + return cmd } diff --git a/cmd/bundle/bundle.go b/cmd/bundle/bundle.go new file mode 100644 index 000000000..d8382d172 --- /dev/null +++ b/cmd/bundle/bundle.go @@ -0,0 +1,24 @@ +package bundle + +import ( + "github.com/spf13/cobra" +) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "bundle", + Short: "Databricks Asset Bundles\n\nOnline documentation: https://docs.databricks.com/en/dev-tools/bundles", + } + + initVariableFlag(cmd) + cmd.AddCommand(newDeployCommand()) + cmd.AddCommand(newDestroyCommand()) + cmd.AddCommand(newLaunchCommand()) + cmd.AddCommand(newRunCommand()) + cmd.AddCommand(newSchemaCommand()) + cmd.AddCommand(newSyncCommand()) + cmd.AddCommand(newTestCommand()) + cmd.AddCommand(newValidateCommand()) + cmd.AddCommand(newInitCommand()) + return cmd +} diff --git a/cmd/bundle/debug/debug.go b/cmd/bundle/debug/debug.go deleted file mode 100644 index fdc894ef1..000000000 --- a/cmd/bundle/debug/debug.go +++ /dev/null @@ -1,19 +0,0 @@ 
-package debug - -import ( - "github.com/spf13/cobra" - - parent "github.com/databricks/cli/cmd/bundle" -) - -var debugCmd = &cobra.Command{ - Use: "debug", -} - -func AddCommand(cmd *cobra.Command) { - debugCmd.AddCommand(cmd) -} - -func init() { - parent.AddCommand(debugCmd) -} diff --git a/cmd/bundle/debug/whoami.go b/cmd/bundle/debug/whoami.go deleted file mode 100644 index 95d97eeb5..000000000 --- a/cmd/bundle/debug/whoami.go +++ /dev/null @@ -1,30 +0,0 @@ -package debug - -import ( - "fmt" - - "github.com/databricks/cli/bundle" - bundleCmd "github.com/databricks/cli/cmd/bundle" - "github.com/spf13/cobra" -) - -var whoamiCmd = &cobra.Command{ - Use: "whoami", - - PreRunE: bundleCmd.ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { - ctx := cmd.Context() - w := bundle.Get(ctx).WorkspaceClient() - user, err := w.CurrentUser.Me(ctx) - if err != nil { - return err - } - - fmt.Fprintln(cmd.OutOrStdout(), user.UserName) - return nil - }, -} - -func init() { - debugCmd.AddCommand(whoamiCmd) -} diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index e8c0d3958..8818bbbf4 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -6,16 +6,25 @@ import ( "github.com/spf13/cobra" ) -var deployCmd = &cobra.Command{ - Use: "deploy", - Short: "Deploy bundle", +func newDeployCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "deploy", + Short: "Deploy bundle", + PreRunE: ConfigureBundleWithVariables, + } - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + var force bool + var forceLock bool + var computeID string + cmd.Flags().BoolVar(&force, "force", false, "Force-override Git branch validation.") + cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.") + cmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) - // If `--force` is specified, force acquisition of the deployment lock. 
- b.Config.Bundle.Lock.Force = forceDeploy + b.Config.Bundle.Force = force + b.Config.Bundle.Lock.Force = forceLock b.Config.Bundle.ComputeID = computeID return bundle.Apply(cmd.Context(), b, bundle.Seq( @@ -23,14 +32,7 @@ var deployCmd = &cobra.Command{ phases.Build(), phases.Deploy(), )) - }, -} + } -var forceDeploy bool -var computeID string - -func init() { - AddCommand(deployCmd) - deployCmd.Flags().BoolVar(&forceDeploy, "force", false, "Force acquisition of deployment lock.") - deployCmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.") + return cmd } diff --git a/cmd/bundle/destroy.go b/cmd/bundle/destroy.go index d0fe699a0..22d998abe 100644 --- a/cmd/bundle/destroy.go +++ b/cmd/bundle/destroy.go @@ -12,16 +12,24 @@ import ( "golang.org/x/term" ) -var destroyCmd = &cobra.Command{ - Use: "destroy", - Short: "Destroy deployed bundle resources", +func newDestroyCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "destroy", + Short: "Destroy deployed bundle resources", - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: ConfigureBundleWithVariables, + } + + var autoApprove bool + var forceDestroy bool + cmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Skip interactive approvals for deleting resources and files") + cmd.Flags().BoolVar(&forceDestroy, "force-lock", false, "Force acquisition of deployment lock.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() b := bundle.Get(ctx) - // If `--force` is specified, force acquisition of the deployment lock. + // If `--force-lock` is specified, force acquisition of the deployment lock. b.Config.Bundle.Lock.Force = forceDestroy // If `--auto-approve`` is specified, we skip confirmation checks @@ -47,14 +55,7 @@ var destroyCmd = &cobra.Command{ phases.Build(), phases.Destroy(), )) - }, -} + } -var autoApprove bool -var forceDestroy bool - -func init() { - AddCommand(destroyCmd) - destroyCmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Skip interactive approvals for deleting resources and files") - destroyCmd.Flags().BoolVar(&forceDestroy, "force", false, "Force acquisition of deployment lock.") + return cmd } diff --git a/cmd/bundle/init.go b/cmd/bundle/init.go new file mode 100644 index 000000000..3038cb7a2 --- /dev/null +++ b/cmd/bundle/init.go @@ -0,0 +1,94 @@ +package bundle + +import ( + "errors" + "os" + "path/filepath" + "strings" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/git" + "github.com/databricks/cli/libs/template" + "github.com/spf13/cobra" +) + +var gitUrlPrefixes = []string{ + "https://", + "git@", +} + +func isRepoUrl(url string) bool { + result := false + for _, prefix := range gitUrlPrefixes { + if strings.HasPrefix(url, prefix) { + result = true + break + } + } + return result +} + +// Computes the repo name from the repo URL. Treats the last non empty word +// when splitting at '/' as the repo name. 
For example: for url git@github.com:databricks/cli.git +// the name would be "cli.git" +func repoName(url string) string { + parts := strings.Split(strings.TrimRight(url, "/"), "/") + return parts[len(parts)-1] +} + +func newInitCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "init [TEMPLATE_PATH]", + Short: "Initialize Template", + Args: cobra.MaximumNArgs(1), + } + + var configFile string + var outputDir string + var templateDir string + cmd.Flags().StringVar(&configFile, "config-file", "", "File containing input parameters for template initialization.") + cmd.Flags().StringVar(&templateDir, "template-dir", "", "Directory within repository that holds the template specification.") + cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to.") + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + var templatePath string + if len(args) > 0 { + templatePath = args[0] + } else { + var err error + if !cmdio.IsOutTTY(ctx) || !cmdio.IsInTTY(ctx) { + return errors.New("please specify a template") + } + templatePath, err = cmdio.Ask(ctx, "Template to use", "default-python") + if err != nil { + return err + } + } + + if !isRepoUrl(templatePath) { + // skip downloading the repo because input arg is not a URL. We assume + // it's a path on the local file system in that case + return template.Materialize(ctx, configFile, templatePath, outputDir) + } + + // Create a temporary directory with the name of the repository. The '*' + // character is replaced by a random string in the generated temporary directory. + repoDir, err := os.MkdirTemp("", repoName(templatePath)+"-*") + if err != nil { + return err + } + // TODO: Add automated test that the downloaded git repo is cleaned up. + // Clone the repository in the temporary directory + err = git.Clone(ctx, templatePath, "", repoDir) + if err != nil { + return err + } + // Clean up downloaded repository once the template is materialized. + defer os.RemoveAll(repoDir) + return template.Materialize(ctx, configFile, filepath.Join(repoDir, templateDir), outputDir) + } + return cmd +} diff --git a/cmd/bundle/init_test.go b/cmd/bundle/init_test.go new file mode 100644 index 000000000..4a795160e --- /dev/null +++ b/cmd/bundle/init_test.go @@ -0,0 +1,27 @@ +package bundle + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBundleInitIsRepoUrl(t *testing.T) { + assert.True(t, isRepoUrl("git@github.com:databricks/cli.git")) + assert.True(t, isRepoUrl("https://github.com/databricks/cli.git")) + + assert.False(t, isRepoUrl("./local")) + assert.False(t, isRepoUrl("foo")) +} + +func TestBundleInitRepoName(t *testing.T) { + // Test valid URLs + assert.Equal(t, "cli.git", repoName("git@github.com:databricks/cli.git")) + assert.Equal(t, "cli", repoName("https://github.com/databricks/cli/")) + + // test invalid URLs. In these cases the error would be floated when the + // git clone operation fails. 
+ assert.Equal(t, "git@github.com:databricks", repoName("git@github.com:databricks")) + assert.Equal(t, "invalid-url", repoName("invalid-url")) + assert.Equal(t, "www.github.com", repoName("https://www.github.com")) +} diff --git a/cmd/bundle/launch.go b/cmd/bundle/launch.go index ae44352e3..bbb43600a 100644 --- a/cmd/bundle/launch.go +++ b/cmd/bundle/launch.go @@ -7,17 +7,20 @@ import ( "github.com/spf13/cobra" ) -var launchCmd = &cobra.Command{ - Use: "launch", - Short: "Launches a notebook on development cluster", - Long: `Reads a file and executes it on dev cluster`, - Args: cobra.ExactArgs(1), +func newLaunchCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "launch", + Short: "Launches a notebook on development cluster", + Long: `Reads a file and executes it on dev cluster`, + Args: cobra.ExactArgs(1), - // We're not ready to expose this command until we specify its semantics. - Hidden: true, + // We're not ready to expose this command until we specify its semantics. + Hidden: true, - PreRunE: root.MustConfigureBundle, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: root.MustConfigureBundle, + } + + cmd.RunE = func(cmd *cobra.Command, args []string) error { return fmt.Errorf("TODO") // contents, err := os.ReadFile(args[0]) // if err != nil { @@ -29,9 +32,7 @@ var launchCmd = &cobra.Command{ // } // fmt.Fprintf(cmd.OutOrStdout(), "Success: %s", results.Text()) // return nil - }, -} + } -func init() { - AddCommand(launchCmd) + return cmd } diff --git a/cmd/bundle/root.go b/cmd/bundle/root.go deleted file mode 100644 index 395ed3837..000000000 --- a/cmd/bundle/root.go +++ /dev/null @@ -1,23 +0,0 @@ -package bundle - -import ( - "github.com/databricks/cli/cmd/root" - "github.com/spf13/cobra" -) - -// rootCmd represents the root command for the bundle subcommand. -var rootCmd = &cobra.Command{ - Use: "bundle", - Short: "Databricks Asset Bundles", -} - -func AddCommand(cmd *cobra.Command) { - rootCmd.AddCommand(cmd) -} - -var variables []string - -func init() { - root.RootCmd.AddCommand(rootCmd) - AddVariableFlag(rootCmd) -} diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index 9ca8fe456..b5a60ee15 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -9,23 +9,31 @@ import ( "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/bundle/run" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" "github.com/spf13/cobra" ) -var runOptions run.Options -var noWait bool +func newRunCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "run [flags] KEY", + Short: "Run a resource (e.g. a job or a pipeline)", -var runCmd = &cobra.Command{ - Use: "run [flags] KEY", - Short: "Run a workload (e.g. 
a job or a pipeline)", + Args: cobra.MaximumNArgs(1), + PreRunE: ConfigureBundleWithVariables, + } - Args: cobra.ExactArgs(1), - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { - b := bundle.Get(cmd.Context()) + var runOptions run.Options + runOptions.Define(cmd.Flags()) - err := bundle.Apply(cmd.Context(), b, bundle.Seq( + var noWait bool + cmd.Flags().BoolVar(&noWait, "no-wait", false, "Don't wait for the run to complete.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + b := bundle.Get(ctx) + + err := bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), terraform.Interpolate(), terraform.Write(), @@ -36,18 +44,36 @@ var runCmd = &cobra.Command{ return err } + // If no arguments are specified, prompt the user to select something to run. + if len(args) == 0 && cmdio.IsInteractive(ctx) { + // Invert completions from KEY -> NAME, to NAME -> KEY. + inv := make(map[string]string) + for k, v := range run.ResourceCompletionMap(b) { + inv[v] = k + } + id, err := cmdio.Select(ctx, inv, "Resource to run") + if err != nil { + return err + } + args = append(args, id) + } + + if len(args) != 1 { + return fmt.Errorf("expected a KEY of the resource to run") + } + runner, err := run.Find(b, args[0]) if err != nil { return err } runOptions.NoWait = noWait - output, err := runner.Run(cmd.Context(), &runOptions) + output, err := runner.Run(ctx, &runOptions) if err != nil { return err } if output != nil { - switch root.OutputType() { + switch root.OutputType(cmd) { case flags.OutputText: resultString, err := output.String() if err != nil { @@ -61,13 +87,13 @@ var runCmd = &cobra.Command{ } cmd.OutOrStdout().Write(b) default: - return fmt.Errorf("unknown output type %s", root.OutputType()) + return fmt.Errorf("unknown output type %s", root.OutputType(cmd)) } } return nil - }, + } - ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { if len(args) > 0 { return nil, cobra.ShellCompDirectiveNoFileComp } @@ -86,11 +112,7 @@ var runCmd = &cobra.Command{ } return run.ResourceCompletions(b), cobra.ShellCompDirectiveNoFileComp - }, -} + } -func init() { - runOptions.Define(runCmd.Flags()) - rootCmd.AddCommand(runCmd) - runCmd.Flags().BoolVar(&noWait, "no-wait", false, "Don't wait for the run to complete.") + return cmd } diff --git a/cmd/bundle/schema.go b/cmd/bundle/schema.go index b288d78e9..8b2c0177b 100644 --- a/cmd/bundle/schema.go +++ b/cmd/bundle/schema.go @@ -9,11 +9,18 @@ import ( "github.com/spf13/cobra" ) -var schemaCmd = &cobra.Command{ - Use: "schema", - Short: "Generate JSON Schema for bundle configuration", +func newSchemaCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "schema", + Short: "Generate JSON Schema for bundle configuration", + } - RunE: func(cmd *cobra.Command, args []string) error { + var openapi string + var onlyDocs bool + cmd.Flags().StringVar(&openapi, "openapi", "", "path to a databricks openapi spec") + cmd.Flags().BoolVar(&onlyDocs, "only-docs", false, "only generate descriptions for the schema") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { docs, err := schema.BundleDocs(openapi) if err != nil { return err @@ -34,14 +41,7 @@ var schemaCmd = &cobra.Command{ } cmd.OutOrStdout().Write(result) return nil - }, -} + } -var openapi string -var onlyDocs bool - -func init() { - 
AddCommand(schemaCmd) - schemaCmd.Flags().StringVar(&openapi, "openapi", "", "path to a databricks openapi spec") - schemaCmd.Flags().BoolVar(&onlyDocs, "only-docs", false, "only generate descriptions for the schema") + return cmd } diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index 19adc2dd6..6d6a6f5a3 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -11,17 +11,30 @@ import ( "github.com/spf13/cobra" ) -func syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOptions, error) { - cacheDir, err := b.CacheDir() +type syncFlags struct { + interval time.Duration + full bool + watch bool +} + +func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOptions, error) { + cacheDir, err := b.CacheDir(cmd.Context()) if err != nil { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } + includes, err := b.GetSyncIncludePatterns(cmd.Context()) + if err != nil { + return nil, fmt.Errorf("cannot get list of sync includes: %w", err) + } + opts := sync.SyncOptions{ LocalPath: b.Config.Path, RemotePath: b.Config.Workspace.FilesPath, - Full: full, - PollInterval: interval, + Include: includes, + Exclude: b.Config.Sync.Exclude, + Full: f.full, + PollInterval: f.interval, SnapshotBasePath: cacheDir, WorkspaceClient: b.WorkspaceClient(), @@ -29,13 +42,21 @@ func syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOpti return &opts, nil } -var syncCmd = &cobra.Command{ - Use: "sync [flags]", - Short: "Synchronize bundle tree to the workspace", - Args: cobra.NoArgs, +func newSyncCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "sync [flags]", + Short: "Synchronize bundle tree to the workspace", + Args: cobra.NoArgs, - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: ConfigureBundleWithVariables, + } + + var f syncFlags + cmd.Flags().DurationVar(&f.interval, "interval", 1*time.Second, "file system polling interval (for --watch)") + cmd.Flags().BoolVar(&f.full, "full", false, "perform full synchronization (default is incremental)") + cmd.Flags().BoolVar(&f.watch, "watch", false, "watch local file system for changes") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) // Run initialize phase to make sure paths are set. 
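// A minimal, self-contained sketch of the pattern applied throughout this
// change (and to syncFlags above): flags move from package-level variables
// into a struct owned by the command constructor, and the RunE closure
// captures that struct, so no two command instances share state. All names
// below are illustrative, not part of this diff.

package main

import (
	"fmt"
	"strings"

	"github.com/spf13/cobra"
)

type greetFlags struct {
	upper bool
}

func newGreetCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use:  "greet NAME",
		Args: cobra.ExactArgs(1),
	}

	// Flag state lives in the constructor's scope instead of package scope.
	var f greetFlags
	cmd.Flags().BoolVar(&f.upper, "upper", false, "print the greeting in upper case")

	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		msg := "hello " + args[0]
		if f.upper {
			msg = strings.ToUpper(msg)
		}
		_, err := fmt.Fprintln(cmd.OutOrStdout(), msg)
		return err
	}

	return cmd
}

func main() {
	_ = newGreetCommand().Execute()
}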
@@ -44,7 +65,7 @@ var syncCmd = &cobra.Command{ return err } - opts, err := syncOptionsFromBundle(cmd, b) + opts, err := f.syncOptionsFromBundle(cmd, b) if err != nil { return err } @@ -57,21 +78,12 @@ var syncCmd = &cobra.Command{ log.Infof(ctx, "Remote file sync location: %v", opts.RemotePath) - if watch { + if f.watch { return s.RunContinuous(ctx) } return s.RunOnce(ctx) - }, -} + } -var interval time.Duration -var full bool -var watch bool - -func init() { - AddCommand(syncCmd) - syncCmd.Flags().DurationVar(&interval, "interval", 1*time.Second, "file system polling interval (for --watch)") - syncCmd.Flags().BoolVar(&full, "full", false, "perform full synchronization (default is incremental)") - syncCmd.Flags().BoolVar(&watch, "watch", false, "watch local file system for changes") + return cmd } diff --git a/cmd/bundle/test.go b/cmd/bundle/test.go index ec36f18a1..ea1a4b716 100644 --- a/cmd/bundle/test.go +++ b/cmd/bundle/test.go @@ -7,16 +7,19 @@ import ( "github.com/spf13/cobra" ) -var testCmd = &cobra.Command{ - Use: "test", - Short: "run tests for the project", - Long: `This is longer description of the command`, +func newTestCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "test", + Short: "run tests for the project", + Long: `This is longer description of the command`, - // We're not ready to expose this command until we specify its semantics. - Hidden: true, + // We're not ready to expose this command until we specify its semantics. + Hidden: true, - PreRunE: root.MustConfigureBundle, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: root.MustConfigureBundle, + } + + cmd.RunE = func(cmd *cobra.Command, args []string) error { return fmt.Errorf("TODO") // results := project.RunPythonOnDev(cmd.Context(), `return 1`) // if results.Failed() { @@ -24,9 +27,7 @@ var testCmd = &cobra.Command{ // } // fmt.Fprintf(cmd.OutOrStdout(), "Success: %s", results.Text()) // return nil - }, -} + } -func init() { - AddCommand(testCmd) + return cmd } diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index 65ab38905..b98cbd52d 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -8,12 +8,15 @@ import ( "github.com/spf13/cobra" ) -var validateCmd = &cobra.Command{ - Use: "validate", - Short: "Validate configuration", +func newValidateCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "validate", + Short: "Validate configuration", - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: ConfigureBundleWithVariables, + } + + cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) err := bundle.Apply(cmd.Context(), b, phases.Initialize()) @@ -27,9 +30,7 @@ var validateCmd = &cobra.Command{ } cmd.OutOrStdout().Write(buf) return nil - }, -} + } -func init() { - AddCommand(validateCmd) + return cmd } diff --git a/cmd/bundle/variables.go b/cmd/bundle/variables.go index b1ab74fe5..c3e4af645 100644 --- a/cmd/bundle/variables.go +++ b/cmd/bundle/variables.go @@ -7,17 +7,22 @@ import ( ) func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { - // Load bundle config and apply environment + // Load bundle config and apply target err := root.MustConfigureBundle(cmd, args) if err != nil { return err } + variables, err := cmd.Flags().GetStringSlice("var") + if err != nil { + return err + } + // Initialize variables by assigning them values passed as command line flags b := bundle.Get(cmd.Context()) return b.Config.InitializeVariables(variables) } -func 
AddVariableFlag(cmd *cobra.Command) { - cmd.PersistentFlags().StringSliceVar(&variables, "var", []string{}, `set values for variables defined in bundle config. Example: --var="foo=bar"`) +func initVariableFlag(cmd *cobra.Command) { + cmd.PersistentFlags().StringSlice("var", []string{}, `set values for variables defined in bundle config. Example: --var="foo=bar"`) } diff --git a/cmd/cmd.go b/cmd/cmd.go new file mode 100644 index 000000000..6dd0f6e21 --- /dev/null +++ b/cmd/cmd.go @@ -0,0 +1,77 @@ +package cmd + +import ( + "context" + "strings" + + "github.com/databricks/cli/cmd/account" + "github.com/databricks/cli/cmd/api" + "github.com/databricks/cli/cmd/auth" + "github.com/databricks/cli/cmd/bundle" + "github.com/databricks/cli/cmd/configure" + "github.com/databricks/cli/cmd/fs" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd/sync" + "github.com/databricks/cli/cmd/version" + "github.com/databricks/cli/cmd/workspace" + "github.com/spf13/cobra" +) + +const ( + mainGroup = "main" + permissionsGroup = "permissions" +) + +func New(ctx context.Context) *cobra.Command { + cli := root.New(ctx) + + // Add account subcommand. + cli.AddCommand(account.New()) + + // Add workspace subcommands. + for _, cmd := range workspace.All() { + // Built-in groups for the workspace commands. + groups := []cobra.Group{ + { + ID: mainGroup, + Title: "Available Commands", + }, + { + ID: permissionsGroup, + Title: "Permission Commands", + }, + } + for i := range groups { + cmd.AddGroup(&groups[i]) + } + + // Order the permissions subcommands after the main commands. + for _, sub := range cmd.Commands() { + switch { + case strings.HasSuffix(sub.Name(), "-permissions"), strings.HasSuffix(sub.Name(), "-permission-levels"): + sub.GroupID = permissionsGroup + default: + sub.GroupID = mainGroup + } + } + + cli.AddCommand(cmd) + } + + // Add workspace command groups. + groups := workspace.Groups() + for i := range groups { + cli.AddGroup(&groups[i]) + } + + // Add other subcommands. + cli.AddCommand(api.New()) + cli.AddCommand(auth.New()) + cli.AddCommand(bundle.New()) + cli.AddCommand(configure.New()) + cli.AddCommand(fs.New()) + cli.AddCommand(sync.New()) + cli.AddCommand(version.New()) + + return cli +} diff --git a/cmd/configure/configure.go b/cmd/configure/configure.go index 14101d593..0c1e40521 100644 --- a/cmd/configure/configure.go +++ b/cmd/configure/configure.go @@ -5,7 +5,6 @@ import ( "fmt" "net/url" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go/config" @@ -112,19 +111,30 @@ func configureNonInteractive(cmd *cobra.Command, ctx context.Context, cfg *confi return nil } -var configureCmd = &cobra.Command{ - Use: "configure", - Short: "Configure authentication", - Long: `Configure authentication. +func newConfigureCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "configure", + Short: "Configure authentication", + Long: `Configure authentication. -This command adds a profile to your ~/.databrickscfg file. -You can write to a different file by setting the DATABRICKS_CONFIG_FILE environment variable. + This command adds a profile to your ~/.databrickscfg file. + You can write to a different file by setting the DATABRICKS_CONFIG_FILE environment variable. -If this command is invoked in non-interactive mode, it will read the token from stdin. -The host must be specified with the --host flag. 
- `, - Hidden: true, - RunE: func(cmd *cobra.Command, args []string) error { + If this command is invoked in non-interactive mode, it will read the token from stdin. + The host must be specified with the --host flag. + `, + Hidden: true, + } + + cmd.Flags().String("host", "", "Databricks workspace host.") + cmd.Flags().String("profile", "DEFAULT", "Name for the connection profile to configure.") + + // Include token flag for compatibility with the legacy CLI. + // It doesn't actually do anything because we always use PATs. + cmd.Flags().Bool("token", true, "Configure using Databricks Personal Access Token") + cmd.Flags().MarkHidden("token") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { var cfg config.Config // Load environment variables, possibly the DEFAULT profile. @@ -152,16 +162,11 @@ The host must be specified with the --host flag. // Save profile to config file. return databrickscfg.SaveToProfile(ctx, &cfg) - }, + } + + return cmd } -func init() { - root.RootCmd.AddCommand(configureCmd) - configureCmd.Flags().String("host", "", "Databricks workspace host.") - configureCmd.Flags().String("profile", "DEFAULT", "Name for the connection profile to configure.") - - // Include token flag for compatibility with the legacy CLI. - // It doesn't actually do anything because we always use PATs. - configureCmd.Flags().BoolP("token", "t", true, "Configure using Databricks Personal Access Token") - configureCmd.Flags().MarkHidden("token") +func New() *cobra.Command { + return newConfigureCommand() } diff --git a/cmd/configure/configure_test.go b/cmd/configure/configure_test.go index 0dbf114d6..cf0505edd 100644 --- a/cmd/configure/configure_test.go +++ b/cmd/configure/configure_test.go @@ -1,4 +1,4 @@ -package configure +package configure_test import ( "context" @@ -7,15 +7,16 @@ import ( "runtime" "testing" - "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd" "github.com/stretchr/testify/assert" "gopkg.in/ini.v1" ) func assertKeyValueInSection(t *testing.T, section *ini.Section, keyName, expectedValue string) { key, err := section.GetKey(keyName) - assert.NoError(t, err) - assert.Equal(t, key.Value(), expectedValue) + if assert.NoError(t, err) { + assert.Equal(t, expectedValue, key.Value()) + } } func setup(t *testing.T) string { @@ -26,6 +27,7 @@ func setup(t *testing.T) string { } t.Setenv(homeEnvVar, tempHomeDir) t.Setenv("DATABRICKS_CONFIG_FILE", "") + t.Setenv("DATABRICKS_TOKEN", "") return tempHomeDir } @@ -52,9 +54,10 @@ func TestDefaultConfigureNoInteractive(t *testing.T) { }) os.Stdin = inp - root.RootCmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) + cmd := cmd.New(ctx) + cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) - err := root.RootCmd.ExecuteContext(ctx) + err := cmd.ExecuteContext(ctx) assert.NoError(t, err) cfgPath := filepath.Join(tempHomeDir, ".databrickscfg") @@ -84,9 +87,10 @@ func TestConfigFileFromEnvNoInteractive(t *testing.T) { t.Cleanup(func() { os.Stdin = oldStdin }) os.Stdin = inp - root.RootCmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) + cmd := cmd.New(ctx) + cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) - err := root.RootCmd.ExecuteContext(ctx) + err := cmd.ExecuteContext(ctx) assert.NoError(t, err) _, err = os.Stat(cfgPath) @@ -112,9 +116,10 @@ func TestCustomProfileConfigureNoInteractive(t *testing.T) { t.Cleanup(func() { os.Stdin = oldStdin }) os.Stdin = inp - root.RootCmd.SetArgs([]string{"configure", "--token", "--host", 
"https://host", "--profile", "CUSTOM"}) + cmd := cmd.New(ctx) + cmd.SetArgs([]string{"configure", "--token", "--host", "https://host", "--profile", "CUSTOM"}) - err := root.RootCmd.ExecuteContext(ctx) + err := cmd.ExecuteContext(ctx) assert.NoError(t, err) _, err = os.Stat(cfgPath) diff --git a/cmd/fs/cat.go b/cmd/fs/cat.go index 2cdc40759..8227cd781 100644 --- a/cmd/fs/cat.go +++ b/cmd/fs/cat.go @@ -6,14 +6,16 @@ import ( "github.com/spf13/cobra" ) -var catCmd = &cobra.Command{ - Use: "cat FILE_PATH", - Short: "Show file content", - Long: `Show the contents of a file.`, - Args: cobra.ExactArgs(1), - PreRunE: root.MustWorkspaceClient, +func newCatCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "cat FILE_PATH", + Short: "Show file content", + Long: `Show the contents of a file.`, + Args: cobra.ExactArgs(1), + PreRunE: root.MustWorkspaceClient, + } - RunE: func(cmd *cobra.Command, args []string) error { + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() f, path, err := filerForPath(ctx, args[0]) @@ -26,9 +28,7 @@ var catCmd = &cobra.Command{ return err } return cmdio.RenderReader(ctx, r) - }, -} + } -func init() { - fsCmd.AddCommand(catCmd) + return cmd } diff --git a/cmd/fs/cp.go b/cmd/fs/cp.go index 204d6c33c..294d2daba 100644 --- a/cmd/fs/cp.go +++ b/cmd/fs/cp.go @@ -15,6 +15,9 @@ import ( ) type copy struct { + overwrite bool + recursive bool + ctx context.Context sourceFiler filer.Filer targetFiler filer.Filer @@ -48,7 +51,7 @@ func (c *copy) cpWriteCallback(sourceDir, targetDir string) fs.WalkDirFunc { } func (c *copy) cpDirToDir(sourceDir, targetDir string) error { - if !cpRecursive { + if !c.recursive { return fmt.Errorf("source path %s is a directory. Please specify the --recursive flag", sourceDir) } @@ -71,7 +74,7 @@ func (c *copy) cpFileToFile(sourcePath, targetPath string) error { } defer r.Close() - if cpOverwrite { + if c.overwrite { err = c.targetFiler.Write(c.ctx, targetPath, r, filer.OverwriteIfExists) if err != nil { return err @@ -123,28 +126,30 @@ func (c *copy) emitFileCopiedEvent(sourcePath, targetPath string) error { return cmdio.RenderWithTemplate(c.ctx, event, template) } -var cpOverwrite bool -var cpRecursive bool +func newCpCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "cp SOURCE_PATH TARGET_PATH", + Short: "Copy files and directories to and from DBFS.", + Long: `Copy files to and from DBFS. -// cpCmd represents the fs cp command -var cpCmd = &cobra.Command{ - Use: "cp SOURCE_PATH TARGET_PATH", - Short: "Copy files and directories to and from DBFS.", - Long: `Copy files to and from DBFS. + For paths in DBFS it is required that you specify the "dbfs" scheme. + For example: dbfs:/foo/bar. - For paths in DBFS it is required that you specify the "dbfs" scheme. - For example: dbfs:/foo/bar. + Recursively copying a directory will copy all files inside directory + at SOURCE_PATH to the directory at TARGET_PATH. - Recursively copying a directory will copy all files inside directory - at SOURCE_PATH to the directory at TARGET_PATH. + When copying a file, if TARGET_PATH is a directory, the file will be created + inside the directory, otherwise the file is created at TARGET_PATH. + `, + Args: cobra.ExactArgs(2), + PreRunE: root.MustWorkspaceClient, + } - When copying a file, if TARGET_PATH is a directory, the file will be created - inside the directory, otherwise the file is created at TARGET_PATH. 
-`, - Args: cobra.ExactArgs(2), - PreRunE: root.MustWorkspaceClient, + var c copy + cmd.Flags().BoolVar(&c.overwrite, "overwrite", false, "overwrite existing files") + cmd.Flags().BoolVarP(&c.recursive, "recursive", "r", false, "recursively copy files from directory") - RunE: func(cmd *cobra.Command, args []string) error { + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() // TODO: Error if a user uses '\' as path separator on windows when "file" @@ -164,22 +169,18 @@ var cpCmd = &cobra.Command{ return err } - sourceScheme := "" + c.sourceScheme = "" if isDbfsPath(fullSourcePath) { - sourceScheme = "dbfs" + c.sourceScheme = "dbfs" } - targetScheme := "" + c.targetScheme = "" if isDbfsPath(fullTargetPath) { - targetScheme = "dbfs" + c.targetScheme = "dbfs" } - c := copy{ - ctx: ctx, - sourceFiler: sourceFiler, - targetFiler: targetFiler, - sourceScheme: sourceScheme, - targetScheme: targetScheme, - } + c.ctx = ctx + c.sourceFiler = sourceFiler + c.targetFiler = targetFiler // Get information about file at source path sourceInfo, err := sourceFiler.Stat(ctx, sourcePath) @@ -200,11 +201,7 @@ var cpCmd = &cobra.Command{ // case 3: source path is a file, and target path is a file return c.cpFileToFile(sourcePath, targetPath) - }, -} + } -func init() { - cpCmd.Flags().BoolVar(&cpOverwrite, "overwrite", false, "overwrite existing files") - cpCmd.Flags().BoolVarP(&cpRecursive, "recursive", "r", false, "recursively copy files from directory") - fsCmd.AddCommand(cpCmd) + return cmd } diff --git a/cmd/fs/fs.go b/cmd/fs/fs.go index a69c4b62d..190220f4a 100644 --- a/cmd/fs/fs.go +++ b/cmd/fs/fs.go @@ -1,17 +1,23 @@ package fs import ( - "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" ) -// fsCmd represents the fs command -var fsCmd = &cobra.Command{ - Use: "fs", - Short: "Filesystem related commands", - Long: `Commands to do DBFS operations.`, -} +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "fs", + Short: "Filesystem related commands", + Long: `Commands to do DBFS operations.`, + } -func init() { - root.RootCmd.AddCommand(fsCmd) + cmd.AddCommand( + newCatCommand(), + newCpCommand(), + newLsCommand(), + newMkdirCommand(), + newRmCommand(), + ) + + return cmd } diff --git a/cmd/fs/ls.go b/cmd/fs/ls.go index b06345d50..7ae55e1f4 100644 --- a/cmd/fs/ls.go +++ b/cmd/fs/ls.go @@ -37,15 +37,21 @@ func toJsonDirEntry(f fs.DirEntry, baseDir string, isAbsolute bool) (*jsonDirEnt }, nil } -// lsCmd represents the ls command -var lsCmd = &cobra.Command{ - Use: "ls DIR_PATH", - Short: "Lists files", - Long: `Lists files`, - Args: cobra.ExactArgs(1), - PreRunE: root.MustWorkspaceClient, +func newLsCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "ls DIR_PATH", + Short: "Lists files", + Long: `Lists files`, + Args: cobra.ExactArgs(1), + PreRunE: root.MustWorkspaceClient, + } - RunE: func(cmd *cobra.Command, args []string) error { + var long bool + var absolute bool + cmd.Flags().BoolVarP(&long, "long", "l", false, "Displays full information including size, file type and modification time since Epoch in milliseconds.") + cmd.Flags().BoolVar(&absolute, "absolute", false, "Displays absolute paths.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() f, path, err := filerForPath(ctx, args[0]) @@ -60,7 +66,7 @@ var lsCmd = &cobra.Command{ jsonDirEntries := make([]jsonDirEntry, len(entries)) for i, entry := range entries { - jsonDirEntry, err := toJsonDirEntry(entry, args[0], lsAbsolute) + jsonDirEntry, err := 
toJsonDirEntry(entry, args[0], absolute) if err != nil { return err } @@ -71,7 +77,7 @@ var lsCmd = &cobra.Command{ }) // Use template for long mode if the flag is set - if longMode { + if long { return cmdio.RenderWithTemplate(ctx, jsonDirEntries, cmdio.Heredoc(` {{range .}}{{if .IsDir}}DIRECTORY {{else}}FILE {{end}}{{.Size}} {{.ModTime|pretty_date}} {{.Name}} {{end}} @@ -81,14 +87,7 @@ var lsCmd = &cobra.Command{ {{range .}}{{.Name}} {{end}} `)) - }, -} + } -var longMode bool -var lsAbsolute bool - -func init() { - lsCmd.Flags().BoolVarP(&longMode, "long", "l", false, "Displays full information including size, file type and modification time since Epoch in milliseconds.") - lsCmd.Flags().BoolVar(&lsAbsolute, "absolute", false, "Displays absolute paths.") - fsCmd.AddCommand(lsCmd) + return cmd } diff --git a/cmd/fs/mkdir.go b/cmd/fs/mkdir.go index cb0491393..c6a5e607c 100644 --- a/cmd/fs/mkdir.go +++ b/cmd/fs/mkdir.go @@ -5,17 +5,19 @@ import ( "github.com/spf13/cobra" ) -var mkdirCmd = &cobra.Command{ - Use: "mkdir DIR_PATH", - // Alias `mkdirs` for this command exists for legacy purposes. This command - // is called databricks fs mkdirs in our legacy CLI: https://github.com/databricks/databricks-cli - Aliases: []string{"mkdirs"}, - Short: "Make directories", - Long: `Mkdir will create directories along the path to the argument directory.`, - Args: cobra.ExactArgs(1), - PreRunE: root.MustWorkspaceClient, +func newMkdirCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "mkdir DIR_PATH", + // Alias `mkdirs` for this command exists for legacy purposes. This command + // is called databricks fs mkdirs in our legacy CLI: https://github.com/databricks/databricks-cli + Aliases: []string{"mkdirs"}, + Short: "Make directories", + Long: `Mkdir will create directories along the path to the argument directory.`, + Args: cobra.ExactArgs(1), + PreRunE: root.MustWorkspaceClient, + } - RunE: func(cmd *cobra.Command, args []string) error { + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() f, path, err := filerForPath(ctx, args[0]) @@ -24,9 +26,7 @@ var mkdirCmd = &cobra.Command{ } return f.Mkdir(ctx, path) - }, -} + } -func init() { - fsCmd.AddCommand(mkdirCmd) + return cmd } diff --git a/cmd/fs/rm.go b/cmd/fs/rm.go index 21f5adb99..3ce8d3b93 100644 --- a/cmd/fs/rm.go +++ b/cmd/fs/rm.go @@ -6,14 +6,19 @@ import ( "github.com/spf13/cobra" ) -var rmCmd = &cobra.Command{ - Use: "rm PATH", - Short: "Remove files and directories from dbfs.", - Long: `Remove files and directories from dbfs.`, - Args: cobra.ExactArgs(1), - PreRunE: root.MustWorkspaceClient, +func newRmCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "rm PATH", + Short: "Remove files and directories from dbfs.", + Long: `Remove files and directories from dbfs.`, + Args: cobra.ExactArgs(1), + PreRunE: root.MustWorkspaceClient, + } - RunE: func(cmd *cobra.Command, args []string) error { + var recursive bool + cmd.Flags().BoolVarP(&recursive, "recursive", "r", false, "Recursively delete a non-empty directory.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() f, path, err := filerForPath(ctx, args[0]) @@ -25,12 +30,7 @@ var rmCmd = &cobra.Command{ return f.Delete(ctx, path, filer.DeleteRecursively) } return f.Delete(ctx, path) - }, -} + } -var recursive bool - -func init() { - rmCmd.Flags().BoolVarP(&recursive, "recursive", "r", false, "Recursively delete a non-empty directory.") - fsCmd.AddCommand(rmCmd) + return cmd } diff --git a/cmd/root/auth.go 
b/cmd/root/auth.go index ae7f73968..de5648c65 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "net/http" "os" "github.com/databricks/cli/bundle" @@ -11,7 +12,6 @@ import ( "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/config" - "github.com/databricks/databricks-sdk-go/service/iam" "github.com/manifoldco/promptui" "github.com/spf13/cobra" ) @@ -19,20 +19,63 @@ import ( // Placeholders to use as unique keys in context.Context. var workspaceClient int var accountClient int -var currentUser int -func init() { - RootCmd.PersistentFlags().StringP("profile", "p", "", "~/.databrickscfg profile") - RootCmd.RegisterFlagCompletionFunc("profile", databrickscfg.ProfileCompletion) +func initProfileFlag(cmd *cobra.Command) { + cmd.PersistentFlags().StringP("profile", "p", "", "~/.databrickscfg profile") + cmd.RegisterFlagCompletionFunc("profile", databrickscfg.ProfileCompletion) +} + +func profileFlagValue(cmd *cobra.Command) (string, bool) { + profileFlag := cmd.Flag("profile") + if profileFlag == nil { + return "", false + } + value := profileFlag.Value.String() + return value, value != "" +} + +// Helper function to create an account client or prompt once if the given configuration is not valid. +func accountClientOrPrompt(ctx context.Context, cfg *config.Config, allowPrompt bool) (*databricks.AccountClient, error) { + a, err := databricks.NewAccountClient((*databricks.Config)(cfg)) + if err == nil { + err = a.Config.Authenticate(emptyHttpRequest(ctx)) + } + + prompt := false + if allowPrompt && err != nil && cmdio.IsInteractive(ctx) { + // Prompt to select a profile if the current configuration is not an account client. + prompt = prompt || errors.Is(err, databricks.ErrNotAccountClient) + // Prompt to select a profile if the current configuration doesn't resolve to a credential provider. + prompt = prompt || errors.Is(err, config.ErrCannotConfigureAuth) + } + + if !prompt { + // If we are not prompting, we can return early. + return a, err + } + + // Try picking a profile dynamically if the current configuration is not valid. + profile, err := askForAccountProfile(ctx) + if err != nil { + return nil, err + } + a, err = databricks.NewAccountClient(&databricks.Config{Profile: profile}) + if err == nil { + err = a.Config.Authenticate(emptyHttpRequest(ctx)) + if err != nil { + return nil, err + } + } + return a, nil } func MustAccountClient(cmd *cobra.Command, args []string) error { cfg := &config.Config{} - // command-line flag can specify the profile in use - profileFlag := cmd.Flag("profile") - if profileFlag != nil { - cfg.Profile = profileFlag.Value.String() + // The command-line profile flag takes precedence over DATABRICKS_CONFIG_PROFILE. + profile, hasProfileFlag := profileFlagValue(cmd) + if hasProfileFlag { + cfg.Profile = profile } if cfg.Profile == "" { @@ -40,10 +83,7 @@ func MustAccountClient(cmd *cobra.Command, args []string) error { // 1. only admins will have account configured // 2. 
99% of admins will have access to just one account // hence, we don't need to create a special "DEFAULT_ACCOUNT" profile yet - _, profiles, err := databrickscfg.LoadProfiles( - databrickscfg.DefaultPath, - databrickscfg.MatchAccountProfiles, - ) + _, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchAccountProfiles) if err != nil { return err } @@ -52,16 +92,8 @@ func MustAccountClient(cmd *cobra.Command, args []string) error { } } -TRY_AUTH: // or try picking a config profile dynamically - a, err := databricks.NewAccountClient((*databricks.Config)(cfg)) - if cmdio.IsInteractive(cmd.Context()) && errors.Is(err, databricks.ErrNotAccountClient) { - profile, err := askForAccountProfile() - if err != nil { - return err - } - cfg = &config.Config{Profile: profile} - goto TRY_AUTH - } + allowPrompt := !hasProfileFlag + a, err := accountClientOrPrompt(cmd.Context(), cfg, allowPrompt) if err != nil { return err } @@ -70,13 +102,48 @@ TRY_AUTH: // or try picking a config profile dynamically return nil } +// Helper function to create a workspace client or prompt once if the given configuration is not valid. +func workspaceClientOrPrompt(ctx context.Context, cfg *config.Config, allowPrompt bool) (*databricks.WorkspaceClient, error) { + w, err := databricks.NewWorkspaceClient((*databricks.Config)(cfg)) + if err == nil { + err = w.Config.Authenticate(emptyHttpRequest(ctx)) + } + + prompt := false + if allowPrompt && err != nil && cmdio.IsInteractive(ctx) { + // Prompt to select a profile if the current configuration is not a workspace client. + prompt = prompt || errors.Is(err, databricks.ErrNotWorkspaceClient) + // Prompt to select a profile if the current configuration doesn't resolve to a credential provider. + prompt = prompt || errors.Is(err, config.ErrCannotConfigureAuth) + } + + if !prompt { + // If we are not prompting, we can return early. + return w, err + } + + // Try picking a profile dynamically if the current configuration is not valid. + profile, err := askForWorkspaceProfile(ctx) + if err != nil { + return nil, err + } + w, err = databricks.NewWorkspaceClient(&databricks.Config{Profile: profile}) + if err == nil { + err = w.Config.Authenticate(emptyHttpRequest(ctx)) + if err != nil { + return nil, err + } + } + return w, nil +} + func MustWorkspaceClient(cmd *cobra.Command, args []string) error { cfg := &config.Config{} - // command-line flag takes precedence over environment variable - profileFlag := cmd.Flag("profile") - if profileFlag != nil { - cfg.Profile = profileFlag.Value.String() + // The command-line profile flag takes precedence over DATABRICKS_CONFIG_PROFILE. 
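
Aside (illustrative sketch, not part of the patch): the accountClientOrPrompt/workspaceClientOrPrompt helpers above validate a configuration eagerly by authenticating an empty HTTP request and fall back to an interactive profile prompt only when prompting is allowed. Below is a minimal standalone approximation of that try-then-fallback shape, using only SDK calls that appear in this diff; `authOK`, `clientOrFallback`, and `pickProfile` are hypothetical names introduced for illustration.

```go
package main

import (
	"context"
	"fmt"
	"net/http"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/config"
)

// authOK checks that the configuration resolves to working credentials by
// authenticating an empty request, mirroring the emptyHttpRequest trick above.
func authOK(ctx context.Context, w *databricks.WorkspaceClient) error {
	req, err := http.NewRequestWithContext(ctx, "", "", nil)
	if err != nil {
		return err
	}
	return w.Config.Authenticate(req)
}

// clientOrFallback tries cfg first and, on failure, retries once with the
// profile returned by pickProfile (a stand-in for the interactive prompt).
func clientOrFallback(ctx context.Context, cfg *config.Config, pickProfile func() (string, error)) (*databricks.WorkspaceClient, error) {
	w, err := databricks.NewWorkspaceClient((*databricks.Config)(cfg))
	if err == nil {
		err = authOK(ctx, w)
	}
	if err == nil {
		return w, nil
	}
	profile, perr := pickProfile()
	if perr != nil {
		return nil, perr
	}
	w, err = databricks.NewWorkspaceClient(&databricks.Config{Profile: profile})
	if err != nil {
		return nil, err
	}
	if err := authOK(ctx, w); err != nil {
		return nil, err
	}
	return w, nil
}

func main() {
	ctx := context.Background()
	pick := func() (string, error) { return "DEFAULT", nil }
	if _, err := clientOrFallback(ctx, &config.Config{}, pick); err != nil {
		fmt.Println("auth failed:", err)
	}
}
```
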
+ profile, hasProfileFlag := profileFlagValue(cmd) + if hasProfileFlag { + cfg.Profile = profile } // try configuring a bundle @@ -91,31 +158,22 @@ func MustWorkspaceClient(cmd *cobra.Command, args []string) error { cfg = currentBundle.WorkspaceClient().Config } -TRY_AUTH: // or try picking a config profile dynamically + allowPrompt := !hasProfileFlag + w, err := workspaceClientOrPrompt(cmd.Context(), cfg, allowPrompt) + if err != nil { + return err + } + ctx := cmd.Context() - w, err := databricks.NewWorkspaceClient((*databricks.Config)(cfg)) - if err != nil { - return err - } - // get current user identity also to verify validity of configuration - me, err := w.CurrentUser.Me(ctx) - if cmdio.IsInteractive(ctx) && errors.Is(err, config.ErrCannotConfigureAuth) { - profile, err := askForWorkspaceProfile() - if err != nil { - return err - } - cfg = &config.Config{Profile: profile} - goto TRY_AUTH - } - if err != nil { - return err - } - ctx = context.WithValue(ctx, ¤tUser, me) ctx = context.WithValue(ctx, &workspaceClient, w) cmd.SetContext(ctx) return nil } +func SetWorkspaceClient(ctx context.Context, w *databricks.WorkspaceClient) context.Context { + return context.WithValue(ctx, &workspaceClient, w) +} + func transformLoadError(path string, err error) error { if os.IsNotExist(err) { return fmt.Errorf("no configuration file found at %s; please create one first", path) @@ -123,9 +181,12 @@ func transformLoadError(path string, err error) error { return err } -func askForWorkspaceProfile() (string, error) { - path := databrickscfg.DefaultPath - file, profiles, err := databrickscfg.LoadProfiles(path, databrickscfg.MatchWorkspaceProfiles) +func askForWorkspaceProfile(ctx context.Context) (string, error) { + path, err := databrickscfg.GetPath() + if err != nil { + return "", fmt.Errorf("cannot determine Databricks config file path: %w", err) + } + file, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchWorkspaceProfiles) if err != nil { return "", transformLoadError(path, err) } @@ -135,7 +196,7 @@ func askForWorkspaceProfile() (string, error) { case 1: return profiles[0].Name, nil } - i, _, err := (&promptui.Select{ + i, _, err := cmdio.RunSelect(ctx, &promptui.Select{ Label: fmt.Sprintf("Workspace profiles defined in %s", file), Items: profiles, Searcher: profiles.SearchCaseInsensitive, @@ -146,18 +207,19 @@ func askForWorkspaceProfile() (string, error) { Inactive: `{{.Name}}`, Selected: `{{ "Using workspace profile" | faint }}: {{ .Name | bold }}`, }, - Stdin: os.Stdin, - Stdout: os.Stderr, - }).Run() + }) if err != nil { return "", err } return profiles[i].Name, nil } -func askForAccountProfile() (string, error) { - path := databrickscfg.DefaultPath - file, profiles, err := databrickscfg.LoadProfiles(path, databrickscfg.MatchAccountProfiles) +func askForAccountProfile(ctx context.Context) (string, error) { + path, err := databrickscfg.GetPath() + if err != nil { + return "", fmt.Errorf("cannot determine Databricks config file path: %w", err) + } + file, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchAccountProfiles) if err != nil { return "", transformLoadError(path, err) } @@ -167,7 +229,7 @@ func askForAccountProfile() (string, error) { case 1: return profiles[0].Name, nil } - i, _, err := (&promptui.Select{ + i, _, err := cmdio.RunSelect(ctx, &promptui.Select{ Label: fmt.Sprintf("Account profiles defined in %s", file), Items: profiles, Searcher: profiles.SearchCaseInsensitive, @@ -178,15 +240,24 @@ func askForAccountProfile() (string, error) { Inactive: 
`{{.Name}}`, Selected: `{{ "Using account profile" | faint }}: {{ .Name | bold }}`, }, - Stdin: os.Stdin, - Stdout: os.Stderr, - }).Run() + }) if err != nil { return "", err } return profiles[i].Name, nil } +// To verify that a client is configured correctly, we pass an empty HTTP request +// to a client's `config.Authenticate` function. Note: this functionality +// should be supported by the SDK itself. +func emptyHttpRequest(ctx context.Context) *http.Request { + req, err := http.NewRequestWithContext(ctx, "", "", nil) + if err != nil { + panic(err) + } + return req +} + func WorkspaceClient(ctx context.Context) *databricks.WorkspaceClient { w, ok := ctx.Value(&workspaceClient).(*databricks.WorkspaceClient) if !ok { @@ -202,11 +273,3 @@ func AccountClient(ctx context.Context) *databricks.AccountClient { } return a } - -func Me(ctx context.Context) *iam.User { - me, ok := ctx.Value(¤tUser).(*iam.User) - if !ok { - panic("cannot get current user. Please report it as a bug") - } - return me -} diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go new file mode 100644 index 000000000..30fa9a086 --- /dev/null +++ b/cmd/root/auth_test.go @@ -0,0 +1,183 @@ +package root + +import ( + "context" + "os" + "path/filepath" + "testing" + "time" + + "github.com/databricks/cli/internal/testutil" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/config" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestEmptyHttpRequest(t *testing.T) { + ctx, _ := context.WithCancel(context.Background()) + req := emptyHttpRequest(ctx) + assert.Equal(t, req.Context(), ctx) +} + +type promptFn func(ctx context.Context, cfg *config.Config, retry bool) (any, error) + +var accountPromptFn = func(ctx context.Context, cfg *config.Config, retry bool) (any, error) { + return accountClientOrPrompt(ctx, cfg, retry) +} + +var workspacePromptFn = func(ctx context.Context, cfg *config.Config, retry bool) (any, error) { + return workspaceClientOrPrompt(ctx, cfg, retry) +} + +func expectPrompts(t *testing.T, fn promptFn, config *config.Config) { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + // Channel to pass errors from the prompting function back to the test. + errch := make(chan error, 1) + + ctx, io := cmdio.SetupTest(ctx) + go func() { + defer close(errch) + defer cancel() + _, err := fn(ctx, config, true) + errch <- err + }() + + // Expect a prompt + line, _, err := io.Stderr.ReadLine() + if assert.NoError(t, err, "Expected to read a line from stderr") { + assert.Contains(t, string(line), "Search:") + } else { + // If there was an error reading from stderr, the prompting function must have terminated early. 
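
Aside (illustrative sketch, not part of the patch): the new auth tests isolate the environment by writing a throwaway .databrickscfg into a temp directory, pointing DATABRICKS_CONFIG_FILE at it, and neutralizing PATH so external credential helpers cannot interfere. A minimal test using the same setup; the profile name, host, and token are placeholders.

```go
package example

import (
	"os"
	"path/filepath"
	"testing"

	"github.com/databricks/databricks-sdk-go"
	"github.com/stretchr/testify/require"
)

// TestProfileIsolation demonstrates the isolation pattern used by the auth tests:
// a throwaway config file, DATABRICKS_CONFIG_FILE pointed at it, and PATH set to
// a directory that contains no credential helpers.
func TestProfileIsolation(t *testing.T) {
	dir := t.TempDir()
	path := filepath.Join(dir, ".databrickscfg")
	contents := "[test-profile]\nhost = https://adb-1234.5.azuredatabricks.net/\ntoken = dummy\n"
	require.NoError(t, os.WriteFile(path, []byte(contents), 0o600))
	t.Setenv("DATABRICKS_CONFIG_FILE", path)
	t.Setenv("PATH", "/nothing")

	// A PAT-based profile typically resolves without any network calls.
	w, err := databricks.NewWorkspaceClient(&databricks.Config{Profile: "test-profile"})
	require.NoError(t, err)
	require.NotNil(t, w)
}
```
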
+ assert.NoError(t, <-errch) + } +} + +func expectReturns(t *testing.T, fn promptFn, config *config.Config) { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + ctx, _ = cmdio.SetupTest(ctx) + client, err := fn(ctx, config, true) + require.NoError(t, err) + require.NotNil(t, client) +} + +func TestAccountClientOrPrompt(t *testing.T) { + testutil.CleanupEnvironment(t) + + dir := t.TempDir() + configFile := filepath.Join(dir, ".databrickscfg") + err := os.WriteFile( + configFile, + []byte(` + [account-1111] + host = https://accounts.azuredatabricks.net/ + account_id = 1111 + token = foobar + + [account-1112] + host = https://accounts.azuredatabricks.net/ + account_id = 1112 + token = foobar + `), + 0755) + require.NoError(t, err) + t.Setenv("DATABRICKS_CONFIG_FILE", configFile) + t.Setenv("PATH", "/nothing") + + t.Run("Prompt if nothing is specified", func(t *testing.T) { + expectPrompts(t, accountPromptFn, &config.Config{}) + }) + + t.Run("Prompt if a workspace host is specified", func(t *testing.T) { + expectPrompts(t, accountPromptFn, &config.Config{ + Host: "https://adb-1234567.89.azuredatabricks.net/", + AccountID: "1234", + Token: "foobar", + }) + }) + + t.Run("Prompt if account ID is not specified", func(t *testing.T) { + expectPrompts(t, accountPromptFn, &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + Token: "foobar", + }) + }) + + t.Run("Prompt if no credential provider can be configured", func(t *testing.T) { + expectPrompts(t, accountPromptFn, &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + AccountID: "1234", + }) + }) + + t.Run("Returns if configuration is valid", func(t *testing.T) { + expectReturns(t, accountPromptFn, &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + AccountID: "1234", + Token: "foobar", + }) + }) + + t.Run("Returns if a valid profile is specified", func(t *testing.T) { + expectReturns(t, accountPromptFn, &config.Config{ + Profile: "account-1111", + }) + }) +} + +func TestWorkspaceClientOrPrompt(t *testing.T) { + testutil.CleanupEnvironment(t) + + dir := t.TempDir() + configFile := filepath.Join(dir, ".databrickscfg") + err := os.WriteFile( + configFile, + []byte(` + [workspace-1111] + host = https://adb-1111.11.azuredatabricks.net/ + token = foobar + + [workspace-1112] + host = https://adb-1112.12.azuredatabricks.net/ + token = foobar + `), + 0755) + require.NoError(t, err) + t.Setenv("DATABRICKS_CONFIG_FILE", configFile) + t.Setenv("PATH", "/nothing") + + t.Run("Prompt if nothing is specified", func(t *testing.T) { + expectPrompts(t, workspacePromptFn, &config.Config{}) + }) + + t.Run("Prompt if an account host is specified", func(t *testing.T) { + expectPrompts(t, workspacePromptFn, &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + AccountID: "1234", + Token: "foobar", + }) + }) + + t.Run("Prompt if no credential provider can be configured", func(t *testing.T) { + expectPrompts(t, workspacePromptFn, &config.Config{ + Host: "https://adb-1111.11.azuredatabricks.net/", + }) + }) + + t.Run("Returns if configuration is valid", func(t *testing.T) { + expectReturns(t, workspacePromptFn, &config.Config{ + Host: "https://adb-1111.11.azuredatabricks.net/", + Token: "foobar", + }) + }) + + t.Run("Returns if a valid profile is specified", func(t *testing.T) { + expectReturns(t, workspacePromptFn, &config.Config{ + Profile: "workspace-1111", + }) + }) +} diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index 8eab7c2c7..3f9d90db6 100644 --- 
a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -1,20 +1,20 @@ package root import ( - "os" + "context" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/env" + envlib "github.com/databricks/cli/libs/env" "github.com/spf13/cobra" "golang.org/x/exp/maps" ) -const envName = "DATABRICKS_BUNDLE_ENV" - -// getEnvironment returns the name of the environment to operate in. -func getEnvironment(cmd *cobra.Command) (value string) { +// getTarget returns the name of the target to operate in. +func getTarget(cmd *cobra.Command) (value string) { // The command line flag takes precedence. - flag := cmd.Flag("environment") + flag := cmd.Flag("target") if flag != nil { value = flag.Value.String() if value != "" { @@ -22,8 +22,17 @@ func getEnvironment(cmd *cobra.Command) (value string) { } } + oldFlag := cmd.Flag("environment") + if oldFlag != nil { + value = oldFlag.Value.String() + if value != "" { + return + } + } + // If it's not set, use the environment variable. - return os.Getenv(envName) + target, _ := env.Target(cmd.Context()) + return target } func getProfile(cmd *cobra.Command) (value string) { @@ -37,12 +46,13 @@ func getProfile(cmd *cobra.Command) (value string) { } // If it's not set, use the environment variable. - return os.Getenv("DATABRICKS_CONFIG_PROFILE") + return envlib.Get(cmd.Context(), "DATABRICKS_CONFIG_PROFILE") } // loadBundle loads the bundle configuration and applies default mutators. -func loadBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, error)) (*bundle.Bundle, error) { - b, err := load() +func loadBundle(cmd *cobra.Command, args []string, load func(ctx context.Context) (*bundle.Bundle, error)) (*bundle.Bundle, error) { + ctx := cmd.Context() + b, err := load(ctx) if err != nil { return nil, err } @@ -57,7 +67,6 @@ func loadBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, b.Config.Workspace.Profile = profile } - ctx := cmd.Context() err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) if err != nil { return nil, err @@ -67,7 +76,7 @@ func loadBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, } // configureBundle loads the bundle configuration and configures it on the command's context. -func configureBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, error)) error { +func configureBundle(cmd *cobra.Command, args []string, load func(ctx context.Context) (*bundle.Bundle, error)) error { b, err := loadBundle(cmd, args, load) if err != nil { return err @@ -79,11 +88,11 @@ func configureBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bun } var m bundle.Mutator - env := getEnvironment(cmd) + env := getTarget(cmd) if env == "" { - m = mutator.SelectDefaultEnvironment() + m = mutator.SelectDefaultTarget() } else { - m = mutator.SelectEnvironment(env) + m = mutator.SelectTarget(env) } ctx := cmd.Context() @@ -107,19 +116,27 @@ func TryConfigureBundle(cmd *cobra.Command, args []string) error { return configureBundle(cmd, args, bundle.TryLoad) } -// environmentCompletion executes to autocomplete the argument to the environment flag. -func environmentCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { +// targetCompletion executes to autocomplete the argument to the target flag. 
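
Aside (illustrative sketch, not part of the patch): getTarget above resolves the bundle target with the precedence --target flag, then the deprecated --environment flag, then an environment variable read through the bundle env package. Below is a self-contained approximation of that precedence using plain cobra; `resolveTarget` is a hypothetical helper, and the os.Getenv fallback with the legacy DATABRICKS_BUNDLE_ENV name (removed above) stands in for env.Target(ctx).

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

// resolveTarget mirrors the precedence above: --target wins, then the
// deprecated --environment flag, then an environment variable.
func resolveTarget(cmd *cobra.Command) string {
	if f := cmd.Flag("target"); f != nil && f.Value.String() != "" {
		return f.Value.String()
	}
	if f := cmd.Flag("environment"); f != nil && f.Value.String() != "" {
		return f.Value.String()
	}
	return os.Getenv("DATABRICKS_BUNDLE_ENV")
}

func main() {
	cmd := &cobra.Command{
		Use: "example",
		Run: func(cmd *cobra.Command, args []string) {},
	}
	cmd.PersistentFlags().StringP("target", "t", "", "bundle target to use (if applicable)")
	cmd.PersistentFlags().StringP("environment", "e", "", "bundle target to use (if applicable)")
	cmd.PersistentFlags().MarkDeprecated("environment", "use --target flag instead")

	cmd.SetArgs([]string{"--target", "development"})
	_ = cmd.Execute()
	fmt.Println(resolveTarget(cmd)) // development
}
```
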
+func targetCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { b, err := loadBundle(cmd, args, bundle.MustLoad) if err != nil { cobra.CompErrorln(err.Error()) return nil, cobra.ShellCompDirectiveError } - return maps.Keys(b.Config.Environments), cobra.ShellCompDirectiveDefault + return maps.Keys(b.Config.Targets), cobra.ShellCompDirectiveDefault } -func init() { - // To operate in the context of a bundle, all commands must take an "environment" parameter. - RootCmd.PersistentFlags().StringP("environment", "e", "", "bundle environment to use (if applicable)") - RootCmd.RegisterFlagCompletionFunc("environment", environmentCompletion) +func initTargetFlag(cmd *cobra.Command) { + // To operate in the context of a bundle, all commands must take an "target" parameter. + cmd.PersistentFlags().StringP("target", "t", "", "bundle target to use (if applicable)") + cmd.RegisterFlagCompletionFunc("target", targetCompletion) +} + +// DEPRECATED flag +func initEnvironmentFlag(cmd *cobra.Command) { + // To operate in the context of a bundle, all commands must take an "environment" parameter. + cmd.PersistentFlags().StringP("environment", "e", "", "bundle target to use (if applicable)") + cmd.PersistentFlags().MarkDeprecated("environment", "use --target flag instead") + cmd.RegisterFlagCompletionFunc("environment", targetCompletion) } diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index 8dc771bd4..3f9641b7e 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -9,6 +9,8 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/internal/testutil" + "github.com/spf13/cobra" "github.com/stretchr/testify/assert" ) @@ -27,15 +29,18 @@ func setupDatabricksCfg(t *testing.T) { t.Setenv(homeEnvVar, tempHomeDir) } -func setup(t *testing.T, host string) *bundle.Bundle { +func emptyCommand(t *testing.T) *cobra.Command { + ctx := context.Background() + cmd := &cobra.Command{} + cmd.SetContext(ctx) + initProfileFlag(cmd) + return cmd +} + +func setup(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle { setupDatabricksCfg(t) - ctx := context.Background() - RootCmd.SetContext(ctx) - _, err := initializeLogger(ctx) - assert.NoError(t, err) - - err = configureBundle(RootCmd, []string{"validate"}, func() (*bundle.Bundle, error) { + err := configureBundle(cmd, []string{"validate"}, func(_ context.Context) (*bundle.Bundle, error) { return &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ @@ -48,72 +53,123 @@ func setup(t *testing.T, host string) *bundle.Bundle { }, nil }) assert.NoError(t, err) - - return bundle.Get(RootCmd.Context()) + return bundle.Get(cmd.Context()) } func TestBundleConfigureDefault(t *testing.T) { - b := setup(t, "https://x.com") + testutil.CleanupEnvironment(t) + + cmd := emptyCommand(t) + b := setup(t, cmd, "https://x.com") assert.NotPanics(t, func() { b.WorkspaceClient() }) } func TestBundleConfigureWithMultipleMatches(t *testing.T) { - b := setup(t, "https://a.com") + testutil.CleanupEnvironment(t) + + cmd := emptyCommand(t) + b := setup(t, cmd, "https://a.com") assert.Panics(t, func() { b.WorkspaceClient() }) } func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) { - RootCmd.Flag("profile").Value.Set("NOEXIST") + testutil.CleanupEnvironment(t) - b := setup(t, "https://x.com") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("NOEXIST") + + b := setup(t, cmd, "https://x.com") assert.PanicsWithError(t, "no matching config 
profiles found", func() { b.WorkspaceClient() }) } func TestBundleConfigureWithMismatchedProfile(t *testing.T) { - RootCmd.Flag("profile").Value.Set("PROFILE-1") + testutil.CleanupEnvironment(t) - b := setup(t, "https://x.com") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("PROFILE-1") + + b := setup(t, cmd, "https://x.com") assert.PanicsWithError(t, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com", func() { b.WorkspaceClient() }) } func TestBundleConfigureWithCorrectProfile(t *testing.T) { - RootCmd.Flag("profile").Value.Set("PROFILE-1") + testutil.CleanupEnvironment(t) - b := setup(t, "https://a.com") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("PROFILE-1") + + b := setup(t, cmd, "https://a.com") assert.NotPanics(t, func() { b.WorkspaceClient() }) } func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) { + testutil.CleanupEnvironment(t) t.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-1") - t.Cleanup(func() { - t.Setenv("DATABRICKS_CONFIG_PROFILE", "") - }) - b := setup(t, "https://x.com") + cmd := emptyCommand(t) + b := setup(t, cmd, "https://x.com") assert.PanicsWithError(t, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com", func() { b.WorkspaceClient() }) } func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) { + testutil.CleanupEnvironment(t) t.Setenv("DATABRICKS_CONFIG_PROFILE", "NOEXIST") - t.Cleanup(func() { - t.Setenv("DATABRICKS_CONFIG_PROFILE", "") - }) - RootCmd.Flag("profile").Value.Set("PROFILE-1") - b := setup(t, "https://a.com") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("PROFILE-1") + + b := setup(t, cmd, "https://a.com") assert.NotPanics(t, func() { b.WorkspaceClient() }) } + +func TestTargetFlagFull(t *testing.T) { + cmd := emptyCommand(t) + initTargetFlag(cmd) + cmd.SetArgs([]string{"version", "--target", "development"}) + + ctx := context.Background() + err := cmd.ExecuteContext(ctx) + assert.NoError(t, err) + + assert.Equal(t, getTarget(cmd), "development") +} + +func TestTargetFlagShort(t *testing.T) { + cmd := emptyCommand(t) + initTargetFlag(cmd) + cmd.SetArgs([]string{"version", "-t", "production"}) + + ctx := context.Background() + err := cmd.ExecuteContext(ctx) + assert.NoError(t, err) + + assert.Equal(t, getTarget(cmd), "production") +} + +// TODO: remove when environment flag is fully deprecated +func TestTargetEnvironmentFlag(t *testing.T) { + cmd := emptyCommand(t) + initTargetFlag(cmd) + initEnvironmentFlag(cmd) + cmd.SetArgs([]string{"version", "--environment", "development"}) + + ctx := context.Background() + err := cmd.ExecuteContext(ctx) + assert.NoError(t, err) + + assert.Equal(t, getTarget(cmd), "development") +} diff --git a/cmd/root/io.go b/cmd/root/io.go index 93830c804..23c7d6c64 100644 --- a/cmd/root/io.go +++ b/cmd/root/io.go @@ -1,41 +1,51 @@ package root import ( - "os" - "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/flags" "github.com/spf13/cobra" ) const envOutputFormat = "DATABRICKS_OUTPUT_FORMAT" -var outputType flags.Output = flags.OutputText +type outputFlag struct { + output flags.Output +} -func init() { - // Configure defaults from environment, if applicable. - // If the provided value is invalid it is ignored. 
- if v, ok := os.LookupEnv(envOutputFormat); ok { - outputType.Set(v) +func initOutputFlag(cmd *cobra.Command) *outputFlag { + f := outputFlag{ + output: flags.OutputText, } - RootCmd.PersistentFlags().VarP(&outputType, "output", "o", "output type: text or json") + // Configure defaults from environment, if applicable. + // If the provided value is invalid it is ignored. + if v, ok := env.Lookup(cmd.Context(), envOutputFormat); ok { + f.output.Set(v) + } + + cmd.PersistentFlags().VarP(&f.output, "output", "o", "output type: text or json") + return &f } -func OutputType() flags.Output { - return outputType +func OutputType(cmd *cobra.Command) flags.Output { + f, ok := cmd.Flag("output").Value.(*flags.Output) + if !ok { + panic("output flag not defined") + } + + return *f } -func initializeIO(cmd *cobra.Command) error { +func (f *outputFlag) initializeIO(cmd *cobra.Command) error { var template string if cmd.Annotations != nil { // rely on zeroval being an empty string template = cmd.Annotations["template"] } - cmdIO := cmdio.NewIO(outputType, cmd.InOrStdin(), cmd.OutOrStdout(), cmd.ErrOrStderr(), template) + cmdIO := cmdio.NewIO(f.output, cmd.InOrStdin(), cmd.OutOrStdout(), cmd.ErrOrStderr(), template) ctx := cmdio.InContext(cmd.Context(), cmdIO) cmd.SetContext(ctx) - return nil } diff --git a/cmd/root/logger.go b/cmd/root/logger.go index 89d707604..dca07ca4b 100644 --- a/cmd/root/logger.go +++ b/cmd/root/logger.go @@ -4,13 +4,14 @@ import ( "context" "fmt" "io" - "os" + "log/slog" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/flags" "github.com/databricks/cli/libs/log" "github.com/fatih/color" - "golang.org/x/exp/slog" + "github.com/spf13/cobra" ) const ( @@ -51,11 +52,12 @@ func (l *friendlyHandler) coloredLevel(rec slog.Record) string { func (l *friendlyHandler) Handle(ctx context.Context, rec slog.Record) error { t := fmt.Sprintf("%02d:%02d", rec.Time.Hour(), rec.Time.Minute()) attrs := "" - rec.Attrs(func(a slog.Attr) { + rec.Attrs(func(a slog.Attr) bool { attrs += fmt.Sprintf(" %s%s%s", color.CyanString(a.Key), color.CyanString("="), color.YellowString(a.Value.String())) + return true }) msg := fmt.Sprintf("%s %s %s%s\n", color.MagentaString(t), @@ -66,28 +68,34 @@ func (l *friendlyHandler) Handle(ctx context.Context, rec slog.Record) error { return err } -func makeLogHandler(opts slog.HandlerOptions) (slog.Handler, error) { - switch logOutput { +type logFlags struct { + file flags.LogFileFlag + level flags.LogLevelFlag + output flags.Output +} + +func (f *logFlags) makeLogHandler(opts slog.HandlerOptions) (slog.Handler, error) { + switch f.output { case flags.OutputJSON: - return opts.NewJSONHandler(logFile.Writer()), nil + return slog.NewJSONHandler(f.file.Writer(), &opts), nil case flags.OutputText: - w := logFile.Writer() + w := f.file.Writer() if cmdio.IsTTY(w) { return &friendlyHandler{ - Handler: opts.NewTextHandler(w), + Handler: slog.NewTextHandler(w, &opts), w: w, }, nil } - return opts.NewTextHandler(w), nil + return slog.NewTextHandler(w, &opts), nil default: - return nil, fmt.Errorf("invalid log output mode: %s", logOutput) + return nil, fmt.Errorf("invalid log output mode: %s", f.output) } } -func initializeLogger(ctx context.Context) (context.Context, error) { +func (f *logFlags) initializeContext(ctx context.Context) (context.Context, error) { opts := slog.HandlerOptions{} - opts.Level = logLevel.Level() + opts.Level = f.level.Level() opts.AddSource = true opts.ReplaceAttr = log.ReplaceAttrFunctions{ 
log.ReplaceLevelAttr, @@ -95,12 +103,12 @@ func initializeLogger(ctx context.Context) (context.Context, error) { }.ReplaceAttr // Open the underlying log file if the user configured an actual file to log to. - err := logFile.Open() + err := f.file.Open() if err != nil { return nil, err } - handler, err := makeLogHandler(opts) + handler, err := f.makeLogHandler(opts) if err != nil { return nil, err } @@ -109,27 +117,30 @@ func initializeLogger(ctx context.Context) (context.Context, error) { return log.NewContext(ctx, slog.Default()), nil } -var logFile = flags.NewLogFileFlag() -var logLevel = flags.NewLogLevelFlag() -var logOutput = flags.OutputText +func initLogFlags(cmd *cobra.Command) *logFlags { + f := logFlags{ + file: flags.NewLogFileFlag(), + level: flags.NewLogLevelFlag(), + output: flags.OutputText, + } -func init() { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. - if v, ok := os.LookupEnv(envLogFile); ok { - logFile.Set(v) + if v, ok := env.Lookup(cmd.Context(), envLogFile); ok { + f.file.Set(v) } - if v, ok := os.LookupEnv(envLogLevel); ok { - logLevel.Set(v) + if v, ok := env.Lookup(cmd.Context(), envLogLevel); ok { + f.level.Set(v) } - if v, ok := os.LookupEnv(envLogFormat); ok { - logOutput.Set(v) + if v, ok := env.Lookup(cmd.Context(), envLogFormat); ok { + f.output.Set(v) } - RootCmd.PersistentFlags().Var(&logFile, "log-file", "file to write logs to") - RootCmd.PersistentFlags().Var(&logLevel, "log-level", "log level") - RootCmd.PersistentFlags().Var(&logOutput, "log-format", "log output format (text or json)") - RootCmd.RegisterFlagCompletionFunc("log-file", logFile.Complete) - RootCmd.RegisterFlagCompletionFunc("log-level", logLevel.Complete) - RootCmd.RegisterFlagCompletionFunc("log-format", logOutput.Complete) + cmd.PersistentFlags().Var(&f.file, "log-file", "file to write logs to") + cmd.PersistentFlags().Var(&f.level, "log-level", "log level") + cmd.PersistentFlags().Var(&f.output, "log-format", "log output format (text or json)") + cmd.RegisterFlagCompletionFunc("log-file", f.file.Complete) + cmd.RegisterFlagCompletionFunc("log-level", f.level.Complete) + cmd.RegisterFlagCompletionFunc("log-format", f.output.Complete) + return &f } diff --git a/cmd/root/progress_logger.go b/cmd/root/progress_logger.go index fbd90ebb8..328b99476 100644 --- a/cmd/root/progress_logger.go +++ b/cmd/root/progress_logger.go @@ -6,43 +6,57 @@ import ( "os" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/flags" + "github.com/spf13/cobra" "golang.org/x/term" ) const envProgressFormat = "DATABRICKS_CLI_PROGRESS_FORMAT" -func resolveModeDefault(format flags.ProgressLogFormat) flags.ProgressLogFormat { - if (logLevel.String() == "disabled" || logFile.String() != "stderr") && +type progressLoggerFlag struct { + flags.ProgressLogFormat + + log *logFlags +} + +func (f *progressLoggerFlag) resolveModeDefault(format flags.ProgressLogFormat) flags.ProgressLogFormat { + if (f.log.level.String() == "disabled" || f.log.file.String() != "stderr") && term.IsTerminal(int(os.Stderr.Fd())) { return flags.ModeInplace } return flags.ModeAppend } -func initializeProgressLogger(ctx context.Context) (context.Context, error) { - if logLevel.String() != "disabled" && logFile.String() == "stderr" && - progressFormat == flags.ModeInplace { +func (f *progressLoggerFlag) initializeContext(ctx context.Context) (context.Context, error) { + if f.log.level.String() != "disabled" && f.log.file.String() == 
"stderr" && + f.ProgressLogFormat == flags.ModeInplace { return nil, fmt.Errorf("inplace progress logging cannot be used when log-file is stderr") } - format := progressFormat + format := f.ProgressLogFormat if format == flags.ModeDefault { - format = resolveModeDefault(format) + format = f.resolveModeDefault(format) } progressLogger := cmdio.NewLogger(format) return cmdio.NewContext(ctx, progressLogger), nil } -var progressFormat = flags.NewProgressLogFormat() +func initProgressLoggerFlag(cmd *cobra.Command, logFlags *logFlags) *progressLoggerFlag { + f := progressLoggerFlag{ + ProgressLogFormat: flags.NewProgressLogFormat(), + + log: logFlags, + } -func init() { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. - if v, ok := os.LookupEnv(envProgressFormat); ok { - progressFormat.Set(v) + if v, ok := env.Lookup(cmd.Context(), envProgressFormat); ok { + f.Set(v) } - RootCmd.PersistentFlags().Var(&progressFormat, "progress-format", "format for progress logs (append, inplace, json)") - RootCmd.RegisterFlagCompletionFunc("progress-format", progressFormat.Complete) + + cmd.PersistentFlags().Var(&f.ProgressLogFormat, "progress-format", "format for progress logs (append, inplace, json)") + cmd.RegisterFlagCompletionFunc("progress-format", f.ProgressLogFormat.Complete) + return &f } diff --git a/cmd/root/progress_logger_test.go b/cmd/root/progress_logger_test.go index 30359257c..9dceee8d5 100644 --- a/cmd/root/progress_logger_test.go +++ b/cmd/root/progress_logger_test.go @@ -6,38 +6,62 @@ import ( "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" + "github.com/spf13/cobra" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) +type progressLoggerTest struct { + *cobra.Command + *logFlags + *progressLoggerFlag +} + +func initializeProgressLoggerTest(t *testing.T) ( + *progressLoggerTest, + *flags.LogLevelFlag, + *flags.LogFileFlag, + *flags.ProgressLogFormat, +) { + plt := &progressLoggerTest{ + Command: &cobra.Command{}, + } + plt.logFlags = initLogFlags(plt.Command) + plt.progressLoggerFlag = initProgressLoggerFlag(plt.Command, plt.logFlags) + return plt, &plt.logFlags.level, &plt.logFlags.file, &plt.progressLoggerFlag.ProgressLogFormat +} + func TestInitializeErrorOnIncompatibleConfig(t *testing.T) { + plt, logLevel, logFile, progressFormat := initializeProgressLoggerTest(t) logLevel.Set("info") logFile.Set("stderr") progressFormat.Set("inplace") - _, err := initializeProgressLogger(context.Background()) + _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.ErrorContains(t, err, "inplace progress logging cannot be used when log-file is stderr") } func TestNoErrorOnDisabledLogLevel(t *testing.T) { + plt, logLevel, logFile, progressFormat := initializeProgressLoggerTest(t) logLevel.Set("disabled") logFile.Set("stderr") progressFormat.Set("inplace") - _, err := initializeProgressLogger(context.Background()) + _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.NoError(t, err) } func TestNoErrorOnNonStderrLogFile(t *testing.T) { + plt, logLevel, logFile, progressFormat := initializeProgressLoggerTest(t) logLevel.Set("info") logFile.Set("stdout") progressFormat.Set("inplace") - _, err := initializeProgressLogger(context.Background()) + _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.NoError(t, err) } func TestDefaultLoggerModeResolution(t *testing.T) { - progressFormat = flags.NewProgressLogFormat() - 
require.Equal(t, progressFormat, flags.ModeDefault) - ctx, err := initializeProgressLogger(context.Background()) + plt, _, _, progressFormat := initializeProgressLoggerTest(t) + require.Equal(t, *progressFormat, flags.ModeDefault) + ctx, err := plt.progressLoggerFlag.initializeContext(context.Background()) require.NoError(t, err) logger, ok := cmdio.FromContext(ctx) assert.True(t, ok) diff --git a/cmd/root/root.go b/cmd/root/root.go index 3b940a491..38eb42ccb 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -6,33 +6,47 @@ import ( "os" "strings" + "log/slog" + "github.com/databricks/cli/internal/build" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/log" "github.com/spf13/cobra" - "golang.org/x/exp/slog" ) -// RootCmd represents the base command when called without any subcommands -var RootCmd = &cobra.Command{ - Use: "databricks", - Short: "Databricks CLI", - Version: build.GetInfo().Version, +func New(ctx context.Context) *cobra.Command { + cmd := &cobra.Command{ + Use: "databricks", + Short: "Databricks CLI", + Version: build.GetInfo().Version, - // Cobra prints the usage string to stderr if a command returns an error. - // This usage string should only be displayed if an invalid combination of flags - // is specified and not when runtime errors occur (e.g. resource not found). - // The usage string is include in [flagErrorFunc] for flag errors only. - SilenceUsage: true, + // Cobra prints the usage string to stderr if a command returns an error. + // This usage string should only be displayed if an invalid combination of flags + // is specified and not when runtime errors occur (e.g. resource not found). + // The usage string is include in [flagErrorFunc] for flag errors only. + SilenceUsage: true, - // Silence error printing by cobra. Errors are printed through cmdio. - SilenceErrors: true, + // Silence error printing by cobra. Errors are printed through cmdio. + SilenceErrors: true, + } - PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + // Pass the context along through the command during initialization. + // It will be overwritten when the command is executed. + cmd.SetContext(ctx) + + // Initialize flags + logFlags := initLogFlags(cmd) + progressLoggerFlag := initProgressLoggerFlag(cmd, logFlags) + outputFlag := initOutputFlag(cmd) + initProfileFlag(cmd) + initEnvironmentFlag(cmd) + initTargetFlag(cmd) + + cmd.PersistentPreRunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() // Configure default logger. - ctx, err := initializeLogger(ctx) + ctx, err := logFlags.initializeContext(ctx) if err != nil { return err } @@ -43,7 +57,7 @@ var RootCmd = &cobra.Command{ slog.String("args", strings.Join(os.Args, ", "))) // Configure progress logger - ctx, err = initializeProgressLogger(ctx) + ctx, err = progressLoggerFlag.initializeContext(ctx) if err != nil { return err } @@ -51,7 +65,7 @@ var RootCmd = &cobra.Command{ cmd.SetContext(ctx) // Configure command IO - err = initializeIO(cmd) + err = outputFlag.initializeIO(cmd) if err != nil { return err } @@ -63,7 +77,11 @@ var RootCmd = &cobra.Command{ ctx = withUpstreamInUserAgent(ctx) cmd.SetContext(ctx) return nil - }, + } + + cmd.SetFlagErrorFunc(flagErrorFunc) + cmd.SetVersionTemplate("Databricks CLI v{{.Version}}\n") + return cmd } // Wrap flag errors to include the usage string. @@ -73,12 +91,12 @@ func flagErrorFunc(c *cobra.Command, err error) error { // Execute adds all child commands to the root command and sets flags appropriately. 
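
Aside (illustrative sketch, not part of the patch): root.New(ctx) now builds the whole command tree from constructors and Execute receives the root command explicitly, instead of relying on a package-level RootCmd assembled in init(). A minimal model of that shape; `newRoot` and `newVersion` are hypothetical stand-ins.

```go
package main

import (
	"context"
	"os"

	"github.com/spf13/cobra"
)

// newVersion is a hypothetical subcommand constructor in the style introduced above.
func newVersion() *cobra.Command {
	return &cobra.Command{
		Use: "version",
		RunE: func(cmd *cobra.Command, args []string) error {
			cmd.Println("example CLI v0.0.0")
			return nil
		},
	}
}

// newRoot builds the command tree from constructors and carries a context from
// the start, rather than mutating a package-level root command in init().
func newRoot(ctx context.Context) *cobra.Command {
	cmd := &cobra.Command{
		Use:           "example",
		SilenceUsage:  true,
		SilenceErrors: true,
	}
	cmd.SetContext(ctx)
	cmd.AddCommand(newVersion())
	return cmd
}

func main() {
	ctx := context.Background()
	if err := newRoot(ctx).ExecuteContext(ctx); err != nil {
		os.Exit(1)
	}
}
```
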
// This is called by main.main(). It only needs to happen once to the rootCmd. -func Execute() { +func Execute(cmd *cobra.Command) { // TODO: deferred panic recovery ctx := context.Background() // Run the command - cmd, err := RootCmd.ExecuteContextC(ctx) + cmd, err := cmd.ExecuteContextC(ctx) if err != nil { // If cmdio logger initialization succeeds, then this function logs with the // initialized cmdio logger, otherwise with the default cmdio logger @@ -103,8 +121,3 @@ func Execute() { os.Exit(1) } } - -func init() { - RootCmd.SetFlagErrorFunc(flagErrorFunc) - RootCmd.SetVersionTemplate("Databricks CLI v{{.Version}}\n") -} diff --git a/cmd/root/user_agent_upstream.go b/cmd/root/user_agent_upstream.go index 3e173bda8..f580b4263 100644 --- a/cmd/root/user_agent_upstream.go +++ b/cmd/root/user_agent_upstream.go @@ -2,8 +2,8 @@ package root import ( "context" - "os" + "github.com/databricks/cli/libs/env" "github.com/databricks/databricks-sdk-go/useragent" ) @@ -16,7 +16,7 @@ const upstreamKey = "upstream" const upstreamVersionKey = "upstream-version" func withUpstreamInUserAgent(ctx context.Context) context.Context { - value := os.Getenv(upstreamEnvVar) + value := env.Get(ctx, upstreamEnvVar) if value == "" { return ctx } @@ -24,7 +24,7 @@ func withUpstreamInUserAgent(ctx context.Context) context.Context { ctx = useragent.InContext(ctx, upstreamKey, value) // Include upstream version as well, if set. - value = os.Getenv(upstreamVersionEnvVar) + value = env.Get(ctx, upstreamVersionEnvVar) if value == "" { return ctx } diff --git a/cmd/sync/sync.go b/cmd/sync/sync.go index d13a85d03..5fdfb169d 100644 --- a/cmd/sync/sync.go +++ b/cmd/sync/sync.go @@ -17,21 +17,36 @@ import ( "github.com/spf13/cobra" ) -func syncOptionsFromBundle(cmd *cobra.Command, args []string, b *bundle.Bundle) (*sync.SyncOptions, error) { +type syncFlags struct { + // project files polling interval + interval time.Duration + full bool + watch bool + output flags.Output +} + +func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, args []string, b *bundle.Bundle) (*sync.SyncOptions, error) { if len(args) > 0 { return nil, fmt.Errorf("SRC and DST are not configurable in the context of a bundle") } - cacheDir, err := b.CacheDir() + cacheDir, err := b.CacheDir(cmd.Context()) if err != nil { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } + includes, err := b.GetSyncIncludePatterns(cmd.Context()) + if err != nil { + return nil, fmt.Errorf("cannot get list of sync includes: %w", err) + } + opts := sync.SyncOptions{ LocalPath: b.Config.Path, RemotePath: b.Config.Workspace.FilesPath, - Full: full, - PollInterval: interval, + Include: includes, + Exclude: b.Config.Sync.Exclude, + Full: f.full, + PollInterval: f.interval, SnapshotBasePath: cacheDir, WorkspaceClient: b.WorkspaceClient(), @@ -39,7 +54,7 @@ func syncOptionsFromBundle(cmd *cobra.Command, args []string, b *bundle.Bundle) return &opts, nil } -func syncOptionsFromArgs(cmd *cobra.Command, args []string) (*sync.SyncOptions, error) { +func (f *syncFlags) syncOptionsFromArgs(cmd *cobra.Command, args []string) (*sync.SyncOptions, error) { if len(args) != 2 { return nil, flag.ErrHelp } @@ -47,8 +62,8 @@ func syncOptionsFromArgs(cmd *cobra.Command, args []string) (*sync.SyncOptions, opts := sync.SyncOptions{ LocalPath: args[0], RemotePath: args[1], - Full: full, - PollInterval: interval, + Full: f.full, + PollInterval: f.interval, // We keep existing behavior for VS Code extension where if there is // no bundle defined, we store the snapshots in 
`.databricks`. @@ -60,19 +75,28 @@ func syncOptionsFromArgs(cmd *cobra.Command, args []string) (*sync.SyncOptions, return &opts, nil } -var syncCmd = &cobra.Command{ - Use: "sync [flags] SRC DST", - Short: "Synchronize a local directory to a workspace directory", - Args: cobra.MaximumNArgs(2), +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "sync [flags] SRC DST", + Short: "Synchronize a local directory to a workspace directory", + Args: cobra.MaximumNArgs(2), + } - // PreRunE: root.TryConfigureBundle, - RunE: func(cmd *cobra.Command, args []string) error { + f := syncFlags{ + output: flags.OutputText, + } + cmd.Flags().DurationVar(&f.interval, "interval", 1*time.Second, "file system polling interval (for --watch)") + cmd.Flags().BoolVar(&f.full, "full", false, "perform full synchronization (default is incremental)") + cmd.Flags().BoolVar(&f.watch, "watch", false, "watch local file system for changes") + cmd.Flags().Var(&f.output, "output", "type of output format") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { var opts *sync.SyncOptions var err error // // To be uncommented and used once our VS Code extension is bundle aware. - // Until then, this could interfere with extension usage where a `bundle.yml` file is present. + // Until then, this could interfere with extension usage where a `databricks.yml` file is present. // See https://github.com/databricks/cli/pull/207. // // b := bundle.GetOrNil(cmd.Context()) @@ -84,7 +108,7 @@ var syncCmd = &cobra.Command{ // } // opts, err = syncOptionsFromBundle(cmd, args, b) // } else { - opts, err = syncOptionsFromArgs(cmd, args) + opts, err = f.syncOptionsFromArgs(cmd, args) // } if err != nil { return err @@ -97,7 +121,7 @@ var syncCmd = &cobra.Command{ } var outputFunc func(context.Context, <-chan sync.Event, io.Writer) - switch output { + switch f.output { case flags.OutputText: outputFunc = textOutput case flags.OutputJSON: @@ -113,7 +137,7 @@ var syncCmd = &cobra.Command{ }() } - if watch { + if f.watch { err = s.RunContinuous(ctx) } else { err = s.RunOnce(ctx) @@ -122,9 +146,9 @@ var syncCmd = &cobra.Command{ s.Close() wg.Wait() return err - }, + } - ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { err := root.TryConfigureBundle(cmd, args) if err != nil { return nil, cobra.ShellCompDirectiveError @@ -149,19 +173,7 @@ var syncCmd = &cobra.Command{ default: return nil, cobra.ShellCompDirectiveNoFileComp } - }, -} + } -// project files polling interval -var interval time.Duration -var full bool -var watch bool -var output flags.Output = flags.OutputText - -func init() { - root.RootCmd.AddCommand(syncCmd) - syncCmd.Flags().DurationVar(&interval, "interval", 1*time.Second, "file system polling interval (for --watch)") - syncCmd.Flags().BoolVar(&full, "full", false, "perform full synchronization (default is incremental)") - syncCmd.Flags().BoolVar(&watch, "watch", false, "watch local file system for changes") - syncCmd.Flags().Var(&output, "output", "type of output format") + return cmd } diff --git a/cmd/sync/sync_test.go b/cmd/sync/sync_test.go index 2d8c8b113..06e97540f 100644 --- a/cmd/sync/sync_test.go +++ b/cmd/sync/sync_test.go @@ -18,7 +18,7 @@ func TestSyncOptionsFromBundle(t *testing.T) { Path: tempDir, Bundle: config.Bundle{ - Environment: "default", + Target: "default", }, Workspace: config.Workspace{ @@ -27,7 +27,8 @@ 
func TestSyncOptionsFromBundle(t *testing.T) { }, } - opts, err := syncOptionsFromBundle(syncCmd, []string{}, b) + f := syncFlags{} + opts, err := f.syncOptionsFromBundle(New(), []string{}, b) require.NoError(t, err) assert.Equal(t, tempDir, opts.LocalPath) assert.Equal(t, "/Users/jane@doe.com/path", opts.RemotePath) @@ -37,16 +38,18 @@ func TestSyncOptionsFromBundle(t *testing.T) { func TestSyncOptionsFromArgsRequiredTwoArgs(t *testing.T) { var err error - _, err = syncOptionsFromArgs(syncCmd, []string{}) + f := syncFlags{} + _, err = f.syncOptionsFromArgs(New(), []string{}) require.ErrorIs(t, err, flag.ErrHelp) - _, err = syncOptionsFromArgs(syncCmd, []string{"foo"}) + _, err = f.syncOptionsFromArgs(New(), []string{"foo"}) require.ErrorIs(t, err, flag.ErrHelp) - _, err = syncOptionsFromArgs(syncCmd, []string{"foo", "bar", "qux"}) + _, err = f.syncOptionsFromArgs(New(), []string{"foo", "bar", "qux"}) require.ErrorIs(t, err, flag.ErrHelp) } func TestSyncOptionsFromArgs(t *testing.T) { - opts, err := syncOptionsFromArgs(syncCmd, []string{"/local", "/remote"}) + f := syncFlags{} + opts, err := f.syncOptionsFromArgs(New(), []string{"/local", "/remote"}) require.NoError(t, err) assert.Equal(t, "/local", opts.LocalPath) assert.Equal(t, "/remote", opts.RemotePath) diff --git a/cmd/version/version.go b/cmd/version/version.go index 1f772424f..653fbb897 100644 --- a/cmd/version/version.go +++ b/cmd/version/version.go @@ -1,25 +1,24 @@ package version import ( - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/internal/build" "github.com/databricks/cli/libs/cmdio" "github.com/spf13/cobra" ) -var versionCmd = &cobra.Command{ - Use: "version", - Args: cobra.NoArgs, +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "version", + Args: cobra.NoArgs, + Short: "Retrieve information about the current version of this CLI", + Annotations: map[string]string{ + "template": "Databricks CLI v{{.Version}}\n", + }, + } - Annotations: map[string]string{ - "template": "Databricks CLI v{{.Version}}\n", - }, - - RunE: func(cmd *cobra.Command, args []string) error { + cmd.RunE = func(cmd *cobra.Command, args []string) error { return cmdio.Render(cmd.Context(), build.GetInfo()) - }, -} + } -func init() { - root.RootCmd.AddCommand(versionCmd) + return cmd } diff --git a/cmd/workspace/alerts/alerts.go b/cmd/workspace/alerts/alerts.go index 91417c63f..7c98f7ee8 100755 --- a/cmd/workspace/alerts/alerts.go +++ b/cmd/workspace/alerts/alerts.go @@ -12,45 +12,66 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "alerts", - Short: `The alerts API can be used to perform CRUD operations on alerts.`, - Long: `The alerts API can be used to perform CRUD operations on alerts. An alert is a +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "alerts", + Short: `The alerts API can be used to perform CRUD operations on alerts.`, + Long: `The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the sql_task type of the Jobs API, e.g. 
:method:jobs/create.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sql.CreateAlert -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.CreateAlert, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.CreateAlert + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the alert.`) - createCmd.Flags().IntVar(&createReq.Rearm, "rearm", createReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) + cmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the object.`) + cmd.Flags().IntVar(&createReq.Rearm, "rearm", createReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create an alert.`, - Long: `Create an alert. + cmd.Use = "create" + cmd.Short = `Create an alert.` + cmd.Long = `Create an alert. Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification - destinations if the condition was met.`, + destinations if the condition was met.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -68,33 +89,54 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sql.DeleteAlertRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
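
Aside (illustrative sketch, not part of the patch): the generated alerts commands introduce override hooks: the generated New() and per-command constructors apply whatever functions have been appended to the *Overrides slices, so manually curated files in the same package can adjust behavior from init() without editing generated code. A compact standalone model of that mechanism; `createRequest` and the curated default are hypothetical.

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// createRequest stands in for a generated request type such as sql.CreateAlert.
type createRequest struct {
	Parent string
	Rearm  int
}

// Override hooks in the style of the generated files: constructors apply any
// functions appended to these slices before returning the command.
var cmdOverrides []func(*cobra.Command)
var createOverrides []func(*cobra.Command, *createRequest)

func newCreate() *cobra.Command {
	cmd := &cobra.Command{Use: "create", Short: "Create an alert."}
	var req createRequest
	cmd.Flags().IntVar(&req.Rearm, "rearm", req.Rearm, "seconds before the alert rearms")
	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		fmt.Printf("create: parent=%q rearm=%d\n", req.Parent, req.Rearm)
		return nil
	}
	for _, fn := range createOverrides {
		fn(cmd, &req)
	}
	return cmd
}

func New() *cobra.Command {
	cmd := &cobra.Command{Use: "alerts", Short: "example command group"}
	for _, fn := range cmdOverrides {
		fn(cmd)
	}
	return cmd
}

// The generated file registers its subcommands through the group-level hook;
// a curated override.go in the same package could append further functions.
func init() {
	cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
		cmd.AddCommand(newCreate())
	})
	createOverrides = append(createOverrides, func(cmd *cobra.Command, req *createRequest) {
		req.Parent = "folders/example" // an illustrative curated default
	})
}

func main() {
	root := New()
	root.SetArgs([]string{"create", "--rearm", "60"})
	_ = root.Execute()
}
```
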
+var deleteOverrides []func( + *cobra.Command, + *sql.DeleteAlertRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteAlertRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ALERT_ID", - Short: `Delete an alert.`, - Long: `Delete an alert. + cmd.Use = "delete ALERT_ID" + cmd.Short = `Delete an alert.` + cmd.Long = `Delete an alert. Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note:** Unlike queries and dashboards, alerts cannot be moved to - the trash.`, + the trash.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -122,31 +164,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sql.GetAlertRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sql.GetAlertRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sql.GetAlertRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ALERT_ID", - Short: `Get an alert.`, - Long: `Get an alert. + cmd.Use = "get ALERT_ID" + cmd.Short = `Get an alert.` + cmd.Long = `Get an alert. - Gets an alert.`, + Gets an alert.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -174,29 +237,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get alerts.`, - Long: `Get alerts. 
+ cmd.Use = "list" + cmd.Short = `Get alerts.` + cmd.Long = `Get alerts. - Gets a list of alerts.`, + Gets a list of alerts.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Alerts.List(ctx) @@ -204,35 +285,56 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq sql.EditAlert -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *sql.EditAlert, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sql.EditAlert + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().IntVar(&updateReq.Rearm, "rearm", updateReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) + cmd.Flags().IntVar(&updateReq.Rearm, "rearm", updateReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Update an alert.`, - Long: `Update an alert. + cmd.Use = "update" + cmd.Short = `Update an alert.` + cmd.Long = `Update an alert. - Updates an alert.`, + Updates an alert.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -250,10 +352,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Alerts diff --git a/cmd/workspace/artifact-allowlists/artifact-allowlists.go b/cmd/workspace/artifact-allowlists/artifact-allowlists.go new file mode 100755 index 000000000..9f9b9be1c --- /dev/null +++ b/cmd/workspace/artifact-allowlists/artifact-allowlists.go @@ -0,0 +1,172 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package artifact_allowlists + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "artifact-allowlists", + Short: `In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the allowlist in UC so that users can leverage these artifacts on compute configured with shared access mode.`, + Long: `In Databricks Runtime 13.3 and above, you can add libraries and init scripts + to the allowlist in UC so that users can leverage these artifacts on compute + configured with shared access mode.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetArtifactAllowlistRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetArtifactAllowlistRequest + + // TODO: short flags + + cmd.Use = "get ARTIFACT_TYPE" + cmd.Short = `Get an artifact allowlist.` + cmd.Long = `Get an artifact allowlist. + + Get the artifact allowlist of a certain artifact type. The caller must be a + metastore admin.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + _, err = fmt.Sscan(args[0], &getReq.ArtifactType) + if err != nil { + return fmt.Errorf("invalid ARTIFACT_TYPE: %s", args[0]) + } + + response, err := w.ArtifactAllowlists.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.SetArtifactAllowlist, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.SetArtifactAllowlist + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "update" + cmd.Short = `Set an artifact allowlist.` + cmd.Long = `Set an artifact allowlist. + + Set the artifact allowlist of a certain artifact type. The whole artifact + allowlist is replaced with the new allowlist. The caller must be a metastore + admin.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.ArtifactAllowlists.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service ArtifactAllowlists diff --git a/cmd/workspace/catalogs/catalogs.go b/cmd/workspace/catalogs/catalogs.go index 78b8b1499..5e06977c4 100755 --- a/cmd/workspace/catalogs/catalogs.go +++ b/cmd/workspace/catalogs/catalogs.go @@ -10,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "catalogs", - Short: `A catalog is the first layer of Unity Catalog’s three-level namespace.`, - Long: `A catalog is the first layer of Unity Catalog’s three-level namespace. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "catalogs", + Short: `A catalog is the first layer of Unity Catalog’s three-level namespace.`, + Long: `A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission. @@ -21,46 +26,65 @@ var Cmd = &cobra.Command{ data centrally across all of the workspaces in a Databricks account. Users in different workspaces can share access to the same data, depending on privileges granted centrally in Unity Catalog.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateCatalog -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateCatalog, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateCatalog + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.ConnectionName, "connection-name", createReq.ConnectionName, `The name of the connection to an external data source.`) + // TODO: map via StringToStringVar: options // TODO: map via StringToStringVar: properties - createCmd.Flags().StringVar(&createReq.ProviderName, "provider-name", createReq.ProviderName, `The name of delta sharing provider.`) - createCmd.Flags().StringVar(&createReq.ShareName, "share-name", createReq.ShareName, `The name of the share under the share provider.`) - createCmd.Flags().StringVar(&createReq.StorageRoot, "storage-root", createReq.StorageRoot, `Storage root URL for managed tables within catalog.`) + cmd.Flags().StringVar(&createReq.ProviderName, "provider-name", createReq.ProviderName, `The name of delta sharing provider.`) + cmd.Flags().StringVar(&createReq.ShareName, "share-name", createReq.ShareName, `The name of the share under the share provider.`) + cmd.Flags().StringVar(&createReq.StorageRoot, "storage-root", createReq.StorageRoot, `Storage root URL for managed tables within catalog.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME", - Short: `Create a catalog.`, - Long: `Create a catalog. + cmd.Use = "create NAME" + cmd.Short = `Create a catalog.` + cmd.Long = `Create a catalog. Creates a new catalog instance in the parent metastore if the caller is a - metastore admin or has the **CREATE_CATALOG** privilege.`, + metastore admin or has the **CREATE_CATALOG** privilege.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -78,38 +102,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteCatalogRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteCatalogRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteCatalogRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the catalog is not empty.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the catalog is not empty.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a catalog.`, - Long: `Delete a catalog. + cmd.Use = "delete NAME" + cmd.Short = `Delete a catalog.` + cmd.Long = `Delete a catalog. Deletes the catalog that matches the supplied name. The caller must be a - metastore admin or the owner of the catalog.`, + metastore admin or the owner of the catalog.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -120,37 +166,59 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetCatalogRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetCatalogRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetCatalogRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a catalog.`, - Long: `Get a catalog. + cmd.Use = "get NAME" + cmd.Short = `Get a catalog.` + cmd.Long = `Get a catalog. Gets the specified catalog in a metastore. 
The caller must be a metastore admin, the owner of the catalog, or a user that has the **USE_CATALOG** - privilege set for their account.`, + privilege set for their account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -161,33 +229,51 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List catalogs.`, - Long: `List catalogs. + cmd.Use = "list" + cmd.Short = `List catalogs.` + cmd.Long = `List catalogs. Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the - array.`, + array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Catalogs.ListAll(ctx) @@ -195,48 +281,71 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateCatalog -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *catalog.UpdateCatalog, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateCatalog + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of catalog.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of catalog.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of catalog.`) + // TODO: map via StringToStringVar: options + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of catalog.`) // TODO: map via StringToStringVar: properties -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a catalog.`, - Long: `Update a catalog. + cmd.Use = "update NAME" + cmd.Short = `Update a catalog.` + cmd.Long = `Update a catalog. Updates the catalog that matches the supplied name. The caller must be either the owner of the catalog, or a metastore admin (when changing the owner field - of the catalog).`, + of the catalog).` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -254,10 +363,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Catalogs diff --git a/cmd/workspace/catalogs/overrides.go b/cmd/workspace/catalogs/overrides.go index 5b8cffea9..6de7a7771 100644 --- a/cmd/workspace/catalogs/overrides.go +++ b/cmd/workspace/catalogs/overrides.go @@ -1,10 +1,17 @@ package catalogs -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Name"}} {{header "Type"}} {{header "Comment"}} {{range .}}{{.Name|green}} {{blue "%s" .CatalogType}} {{.Comment}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/clean-rooms/clean-rooms.go b/cmd/workspace/clean-rooms/clean-rooms.go new file mode 100755 index 000000000..5aa704fa4 --- /dev/null +++ b/cmd/workspace/clean-rooms/clean-rooms.go @@ -0,0 +1,377 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package clean_rooms + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/sharing" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "clean-rooms", + Short: `A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases.`, + Long: `A clean room is a secure, privacy-protecting environment where two or more + parties can share sensitive enterprise data, including customer data, for + measurements, insights, activation and other use cases. + + To create clean rooms, you must be a metastore admin or a user with the + **CREATE_CLEAN_ROOM** privilege.`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sharing.CreateCleanRoom, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sharing.CreateCleanRoom + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + + cmd.Use = "create" + cmd.Short = `Create a clean room.` + cmd.Long = `Create a clean room. + + Creates a new clean room with specified colaborators. 
The caller must be a + metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createJson.Unmarshal(&createReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.CleanRooms.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sharing.DeleteCleanRoomRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sharing.DeleteCleanRoomRequest + + // TODO: short flags + + cmd.Use = "delete NAME_ARG" + cmd.Short = `Delete a clean room.` + cmd.Long = `Delete a clean room. + + Deletes a data object clean room from the metastore. The caller must be an + owner of the clean room.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.NameArg = args[0] + + err = w.CleanRooms.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sharing.GetCleanRoomRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sharing.GetCleanRoomRequest + + // TODO: short flags + + cmd.Flags().BoolVar(&getReq.IncludeRemoteDetails, "include-remote-details", getReq.IncludeRemoteDetails, `Whether to include remote details (central) on the clean room.`) + + cmd.Use = "get NAME_ARG" + cmd.Short = `Get a clean room.` + cmd.Long = `Get a clean room. + + Gets a data object clean room from the metastore. 
The caller must be a + metastore admin or the owner of the clean room.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getReq.NameArg = args[0] + + response, err := w.CleanRooms.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "list" + cmd.Short = `List clean rooms.` + cmd.Long = `List clean rooms. + + Gets an array of data object clean rooms from the metastore. The caller must + be a metastore admin or the owner of the clean room. There is no guarantee of + a specific ordering of the elements in the array.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.CleanRooms.ListAll(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *sharing.UpdateCleanRoom, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sharing.UpdateCleanRoom + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: catalog_updates + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the clean room.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of clean room.`) + + cmd.Use = "update NAME_ARG" + cmd.Short = `Update a clean room.` + cmd.Long = `Update a clean room. + + Updates the clean room with the changes and data objects in the request. The + caller must be the owner of the clean room or a metastore admin. + + When the caller is a metastore admin, only the __owner__ field can be updated. 
+ + In the case that the clean room name is changed **updateCleanRoom** requires + that the caller is both the clean room owner and a metastore admin. + + For each table that is added through this method, the clean room owner must + also have **SELECT** privilege on the table. The privilege must be maintained + indefinitely for recipients to be able to access the table. Typically, you + should use a group as the clean room owner. + + Table removals through **update** do not require additional privileges.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } + updateReq.NameArg = args[0] + + response, err := w.CleanRooms.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service CleanRooms diff --git a/cmd/workspace/cluster-policies/cluster-policies.go b/cmd/workspace/cluster-policies/cluster-policies.go index 1f1d36c1d..0e3091945 100755 --- a/cmd/workspace/cluster-policies/cluster-policies.go +++ b/cmd/workspace/cluster-policies/cluster-policies.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "cluster-policies", - Short: `Cluster policy limits the ability to configure clusters based on a set of rules.`, - Long: `Cluster policy limits the ability to configure clusters based on a set of +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "cluster-policies", + Short: `Cluster policy limits the ability to configure clusters based on a set of rules.`, + Long: `Cluster policy limits the ability to configure clusters based on a set of rules. The policy rules limit the attributes or attribute values available for cluster creation. Cluster policies have ACLs that limit their use to specific users and groups. @@ -39,45 +44,62 @@ var Cmd = &cobra.Command{ Only admin users can create, edit, and delete policies. Admin users also have access to all policies.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq compute.CreatePolicy -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *compute.CreatePolicy, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq compute.CreatePolicy + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Definition, "definition", createReq.Definition, `Policy definition document expressed in Databricks Cluster Policy Definition Language.`) - createCmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `Additional human-readable description of the cluster policy.`) - createCmd.Flags().Int64Var(&createReq.MaxClustersPerUser, "max-clusters-per-user", createReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`) - createCmd.Flags().StringVar(&createReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", createReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in Databricks Policy Definition Language.`) - createCmd.Flags().StringVar(&createReq.PolicyFamilyId, "policy-family-id", createReq.PolicyFamilyId, `ID of the policy family.`) + cmd.Flags().StringVar(&createReq.Definition, "definition", createReq.Definition, `Policy definition document expressed in Databricks Cluster Policy Definition Language.`) + cmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `Additional human-readable description of the cluster policy.`) + cmd.Flags().Int64Var(&createReq.MaxClustersPerUser, "max-clusters-per-user", createReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`) + cmd.Flags().StringVar(&createReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", createReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in Databricks Policy Definition Language.`) + cmd.Flags().StringVar(&createReq.PolicyFamilyId, "policy-family-id", createReq.PolicyFamilyId, `ID of the policy family.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME", - Short: `Create a new policy.`, - Long: `Create a new policy. + cmd.Use = "create NAME" + cmd.Short = `Create a new policy.` + cmd.Long = `Create a new policy. - Creates a new policy with prescribed settings.`, + Creates a new policy with prescribed settings.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -95,34 +117,55 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq compute.DeletePolicy -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *compute.DeletePolicy, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq compute.DeletePolicy + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete POLICY_ID", - Short: `Delete a cluster policy.`, - Long: `Delete a cluster policy. + cmd.Use = "delete POLICY_ID" + cmd.Short = `Delete a cluster policy.` + cmd.Long = `Delete a cluster policy. Delete a policy for a cluster. Clusters governed by this policy can still run, - but cannot be edited.`, + but cannot be edited.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -157,47 +200,69 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start edit command -var editReq compute.EditPolicy -var editJson flags.JsonFlag -func init() { - Cmd.AddCommand(editCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var editOverrides []func( + *cobra.Command, + *compute.EditPolicy, +) + +func newEdit() *cobra.Command { + cmd := &cobra.Command{} + + var editReq compute.EditPolicy + var editJson flags.JsonFlag + // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) - editCmd.Flags().StringVar(&editReq.Definition, "definition", editReq.Definition, `Policy definition document expressed in Databricks Cluster Policy Definition Language.`) - editCmd.Flags().StringVar(&editReq.Description, "description", editReq.Description, `Additional human-readable description of the cluster policy.`) - editCmd.Flags().Int64Var(&editReq.MaxClustersPerUser, "max-clusters-per-user", editReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`) - editCmd.Flags().StringVar(&editReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", editReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in Databricks Policy Definition Language.`) - editCmd.Flags().StringVar(&editReq.PolicyFamilyId, "policy-family-id", editReq.PolicyFamilyId, `ID of the policy family.`) + cmd.Flags().StringVar(&editReq.Definition, "definition", editReq.Definition, `Policy definition document expressed in Databricks Cluster Policy Definition Language.`) + cmd.Flags().StringVar(&editReq.Description, "description", editReq.Description, `Additional human-readable description of the cluster policy.`) + cmd.Flags().Int64Var(&editReq.MaxClustersPerUser, "max-clusters-per-user", editReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`) + cmd.Flags().StringVar(&editReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", editReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in Databricks Policy Definition Language.`) + cmd.Flags().StringVar(&editReq.PolicyFamilyId, "policy-family-id", editReq.PolicyFamilyId, `ID of the policy family.`) -} - -var editCmd = &cobra.Command{ - Use: "edit POLICY_ID NAME", - Short: `Update a cluster policy.`, - Long: `Update a cluster policy. + cmd.Use = "edit POLICY_ID NAME" + cmd.Short = `Update a cluster policy.` + cmd.Long = `Update a cluster policy. Update an existing policy for cluster. This operation may make some clusters - governed by the previous policy invalid.`, + governed by the previous policy invalid.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -216,31 +281,52 @@ var editCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start get command -var getReq compute.GetClusterPolicyRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetClusterPolicyRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq compute.GetClusterPolicyRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get POLICY_ID", - Short: `Get entity.`, - Long: `Get entity. + cmd.Use = "get POLICY_ID" + cmd.Short = `Get a cluster policy.` + cmd.Long = `Get a cluster policy. - Get a cluster policy entity. Creation and editing is available to admins only.`, + Get a cluster policy entity. Creation and editing is available to admins only.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -268,43 +354,212 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *compute.GetClusterPolicyPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq compute.GetClusterPolicyPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels CLUSTER_POLICY_ID" + cmd.Short = `Get cluster policy permission levels.` + cmd.Long = `Get cluster policy permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster policy for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster policy for which to get or manage permissions") + } + getPermissionLevelsReq.ClusterPolicyId = args[0] + + response, err := w.ClusterPolicies.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *compute.GetClusterPolicyPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq compute.GetClusterPolicyPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions CLUSTER_POLICY_ID" + cmd.Short = `Get cluster policy permissions.` + cmd.Long = `Get cluster policy permissions. + + Gets the permissions of a cluster policy. Cluster policies can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster policy for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster policy for which to get or manage permissions") + } + getPermissionsReq.ClusterPolicyId = args[0] + + response, err := w.ClusterPolicies.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start list command -var listReq compute.ListClusterPoliciesRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *compute.ListClusterPoliciesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq compute.ListClusterPoliciesRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().Var(&listReq.SortColumn, "sort-column", `The cluster policy attribute to sort by.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order in which the policies get listed.`) + cmd.Flags().Var(&listReq.SortColumn, "sort-column", `The cluster policy attribute to sort by.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order in which the policies get listed.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get a cluster policy.`, - Long: `Get a cluster policy. + cmd.Use = "list" + cmd.Short = `List cluster policies.` + cmd.Long = `List cluster policies. - Returns a list of policies accessible by the requesting user.`, + Returns a list of policies accessible by the requesting user.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -321,10 +576,192 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *compute.ClusterPolicyPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq compute.ClusterPolicyPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions CLUSTER_POLICY_ID" + cmd.Short = `Set cluster policy permissions.` + cmd.Long = `Set cluster policy permissions. + + Sets permissions on a cluster policy. 
Cluster policies can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster policy for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster policy for which to get or manage permissions") + } + setPermissionsReq.ClusterPolicyId = args[0] + + response, err := w.ClusterPolicies.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *compute.ClusterPolicyPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq compute.ClusterPolicyPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions CLUSTER_POLICY_ID" + cmd.Short = `Update cluster policy permissions.` + cmd.Long = `Update cluster policy permissions. + + Updates the permissions on a cluster policy. Cluster policies can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster policy for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster policy for which to get or manage permissions") + } + updatePermissionsReq.ClusterPolicyId = args[0] + + response, err := w.ClusterPolicies.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service ClusterPolicies diff --git a/cmd/workspace/cluster-policies/overrides.go b/cmd/workspace/cluster-policies/overrides.go index dea5c6bfc..9278b29c3 100644 --- a/cmd/workspace/cluster-policies/overrides.go +++ b/cmd/workspace/cluster-policies/overrides.go @@ -1,11 +1,22 @@ package cluster_policies -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, _ *compute.ListClusterPoliciesRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.PolicyId | green}} {{.Name}} {{end}}`) +} +func getOverride(getCmd *cobra.Command, _ *compute.GetClusterPolicyRequest) { getCmd.Annotations["template"] = cmdio.Heredoc(`{{.Definition | pretty_json}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) + getOverrides = append(getOverrides, getOverride) +} diff --git a/cmd/workspace/clusters/clusters.go b/cmd/workspace/clusters/clusters.go index bc891eef7..dce6753df 100755 --- a/cmd/workspace/clusters/clusters.go +++ b/cmd/workspace/clusters/clusters.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "clusters", - Short: `The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.`, - Long: `The Clusters API allows you to create, start, edit, list, terminate, and +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "clusters", + Short: `The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.`, + Long: `The Clusters API allows you to create, start, edit, list, terminate, and delete clusters. Databricks maps cluster node instance types to compute units known as DBUs. @@ -43,40 +48,57 @@ var Cmd = &cobra.Command{ recently terminated by the job scheduler. To keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. 
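// NOTE (editor's illustration, not part of the generated diff): the loop below
// relies on Go's initialization order. Every init() further down in this file
// appends a closure to cmdOverrides, and all package-level init() functions run
// before New() can be called, so this range attaches every generated
// subcommand. A caller therefore builds the whole group in one call; a minimal
// sketch, assuming a hypothetical root command:
//
//	rootCmd := &cobra.Command{Use: "databricks"}
//	rootCmd.AddCommand(clusters.New()) // group plus all generated subcommands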
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start change-owner command -var changeOwnerReq compute.ChangeClusterOwner -var changeOwnerJson flags.JsonFlag -func init() { - Cmd.AddCommand(changeOwnerCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var changeOwnerOverrides []func( + *cobra.Command, + *compute.ChangeClusterOwner, +) + +func newChangeOwner() *cobra.Command { + cmd := &cobra.Command{} + + var changeOwnerReq compute.ChangeClusterOwner + var changeOwnerJson flags.JsonFlag + // TODO: short flags - changeOwnerCmd.Flags().Var(&changeOwnerJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&changeOwnerJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var changeOwnerCmd = &cobra.Command{ - Use: "change-owner CLUSTER_ID OWNER_USERNAME", - Short: `Change cluster owner.`, - Long: `Change cluster owner. + cmd.Use = "change-owner CLUSTER_ID OWNER_USERNAME" + cmd.Short = `Change cluster owner.` + cmd.Long = `Change cluster owner. Change the owner of the cluster. You must be an admin to perform this - operation.`, + operation.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -95,58 +117,80 @@ var changeOwnerCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range changeOwnerOverrides { + fn(cmd, &changeOwnerReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newChangeOwner()) + }) } // start create command -var createReq compute.CreateCluster -var createJson flags.JsonFlag -var createSkipWait bool -var createTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
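// NOTE (editor's illustration, not part of the generated diff): the override
// slices declared below are the extension points for hand-written code. A
// manually curated overrides.go in this package could, for example, default new
// clusters to auto-terminate and require an explicit name. The function name
// and values here are hypothetical:

func exampleCreateOverride(createCmd *cobra.Command, createReq *compute.CreateCluster) {
	// Default to auto-termination after two hours. The user can still pick
	// another value with --autotermination-minutes, because flag parsing only
	// happens at execution time, long after this override has run.
	createReq.AutoterminationMinutes = 120
	// Refuse to create unnamed clusters.
	_ = createCmd.MarkFlagRequired("cluster-name")
}

func init() {
	createOverrides = append(createOverrides, exampleCreateOverride)
}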
+var createOverrides []func( + *cobra.Command, + *compute.CreateCluster, +) -func init() { - Cmd.AddCommand(createCmd) +func newCreate() *cobra.Command { + cmd := &cobra.Command{} - createCmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) - createCmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var createReq compute.CreateCluster + var createJson flags.JsonFlag + + var createSkipWait bool + var createTimeout time.Duration + + cmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.ApplyPolicyDefaultValues, "apply-policy-default-values", createReq.ApplyPolicyDefaultValues, `Note: This field won't be true for webapp requests.`) + cmd.Flags().BoolVar(&createReq.ApplyPolicyDefaultValues, "apply-policy-default-values", createReq.ApplyPolicyDefaultValues, `Note: This field won't be true for webapp requests.`) // TODO: complex arg: autoscale - createCmd.Flags().IntVar(&createReq.AutoterminationMinutes, "autotermination-minutes", createReq.AutoterminationMinutes, `Automatically terminates the cluster after it is inactive for this time in minutes.`) + cmd.Flags().IntVar(&createReq.AutoterminationMinutes, "autotermination-minutes", createReq.AutoterminationMinutes, `Automatically terminates the cluster after it is inactive for this time in minutes.`) // TODO: complex arg: aws_attributes // TODO: complex arg: azure_attributes // TODO: complex arg: cluster_log_conf - createCmd.Flags().StringVar(&createReq.ClusterName, "cluster-name", createReq.ClusterName, `Cluster name requested by the user.`) - createCmd.Flags().Var(&createReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.`) + cmd.Flags().StringVar(&createReq.ClusterName, "cluster-name", createReq.ClusterName, `Cluster name requested by the user.`) + cmd.Flags().Var(&createReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.`) // TODO: map via StringToStringVar: custom_tags - createCmd.Flags().StringVar(&createReq.DriverInstancePoolId, "driver-instance-pool-id", createReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) - createCmd.Flags().StringVar(&createReq.DriverNodeTypeId, "driver-node-type-id", createReq.DriverNodeTypeId, `The node type of the Spark driver.`) - createCmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) - createCmd.Flags().BoolVar(&createReq.EnableLocalDiskEncryption, "enable-local-disk-encryption", createReq.EnableLocalDiskEncryption, `Whether to enable LUKS on cluster VMs' local disks.`) + cmd.Flags().Var(&createReq.DataSecurityMode, "data-security-mode", `This describes an 
enum.`) + // TODO: complex arg: docker_image + cmd.Flags().StringVar(&createReq.DriverInstancePoolId, "driver-instance-pool-id", createReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) + cmd.Flags().StringVar(&createReq.DriverNodeTypeId, "driver-node-type-id", createReq.DriverNodeTypeId, `The node type of the Spark driver.`) + cmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) + cmd.Flags().BoolVar(&createReq.EnableLocalDiskEncryption, "enable-local-disk-encryption", createReq.EnableLocalDiskEncryption, `Whether to enable LUKS on cluster VMs' local disks.`) // TODO: complex arg: gcp_attributes // TODO: array: init_scripts - createCmd.Flags().StringVar(&createReq.InstancePoolId, "instance-pool-id", createReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) - createCmd.Flags().StringVar(&createReq.NodeTypeId, "node-type-id", createReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) - createCmd.Flags().IntVar(&createReq.NumWorkers, "num-workers", createReq.NumWorkers, `Number of worker nodes that this cluster should have.`) - createCmd.Flags().StringVar(&createReq.PolicyId, "policy-id", createReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) - createCmd.Flags().Var(&createReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) + cmd.Flags().StringVar(&createReq.InstancePoolId, "instance-pool-id", createReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) + cmd.Flags().StringVar(&createReq.NodeTypeId, "node-type-id", createReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) + cmd.Flags().IntVar(&createReq.NumWorkers, "num-workers", createReq.NumWorkers, `Number of worker nodes that this cluster should have.`) + cmd.Flags().StringVar(&createReq.PolicyId, "policy-id", createReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) + cmd.Flags().Var(&createReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) + cmd.Flags().StringVar(&createReq.SingleUserName, "single-user-name", createReq.SingleUserName, `Single user name if data_security_mode is SINGLE_USER.`) // TODO: map via StringToStringVar: spark_conf // TODO: map via StringToStringVar: spark_env_vars // TODO: array: ssh_public_keys // TODO: complex arg: workload_type -} - -var createCmd = &cobra.Command{ - Use: "create SPARK_VERSION", - Short: `Create new cluster.`, - Long: `Create new cluster. + cmd.Use = "create SPARK_VERSION" + cmd.Short = `Create new cluster.` + cmd.Long = `Create new cluster. Creates a new Spark cluster. This method will acquire new instances from the cloud provider if necessary. Note: Databricks may not be able to acquire some @@ -155,18 +199,20 @@ var createCmd = &cobra.Command{ If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed. 
Otherwise the cluster will terminate with an - informative error message.`, + informative error message.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -196,42 +242,62 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq compute.DeleteCluster -var deleteJson flags.JsonFlag -var deleteSkipWait bool -var deleteTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *compute.DeleteCluster, +) -func init() { - Cmd.AddCommand(deleteCmd) +func newDelete() *cobra.Command { + cmd := &cobra.Command{} - deleteCmd.Flags().BoolVar(&deleteSkipWait, "no-wait", deleteSkipWait, `do not wait to reach TERMINATED state`) - deleteCmd.Flags().DurationVar(&deleteTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED state`) + var deleteReq compute.DeleteCluster + var deleteJson flags.JsonFlag + + var deleteSkipWait bool + var deleteTimeout time.Duration + + cmd.Flags().BoolVar(&deleteSkipWait, "no-wait", deleteSkipWait, `do not wait to reach TERMINATED state`) + cmd.Flags().DurationVar(&deleteTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED state`) // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete CLUSTER_ID", - Short: `Terminate cluster.`, - Long: `Terminate cluster. + cmd.Use = "delete CLUSTER_ID" + cmd.Short = `Terminate cluster.` + cmd.Long = `Terminate cluster. Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the termination has completed, the cluster will be in a TERMINATED state. 
If the cluster is already in a TERMINATING or - TERMINATED state, nothing will happen.`, + TERMINATED state, nothing will happen.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -278,61 +344,80 @@ var deleteCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start edit command -var editReq compute.EditCluster -var editJson flags.JsonFlag -var editSkipWait bool -var editTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var editOverrides []func( + *cobra.Command, + *compute.EditCluster, +) -func init() { - Cmd.AddCommand(editCmd) +func newEdit() *cobra.Command { + cmd := &cobra.Command{} - editCmd.Flags().BoolVar(&editSkipWait, "no-wait", editSkipWait, `do not wait to reach RUNNING state`) - editCmd.Flags().DurationVar(&editTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var editReq compute.EditCluster + var editJson flags.JsonFlag + + var editSkipWait bool + var editTimeout time.Duration + + cmd.Flags().BoolVar(&editSkipWait, "no-wait", editSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&editTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) - editCmd.Flags().BoolVar(&editReq.ApplyPolicyDefaultValues, "apply-policy-default-values", editReq.ApplyPolicyDefaultValues, `Note: This field won't be true for webapp requests.`) + cmd.Flags().BoolVar(&editReq.ApplyPolicyDefaultValues, "apply-policy-default-values", editReq.ApplyPolicyDefaultValues, `Note: This field won't be true for webapp requests.`) // TODO: complex arg: autoscale - editCmd.Flags().IntVar(&editReq.AutoterminationMinutes, "autotermination-minutes", editReq.AutoterminationMinutes, `Automatically terminates the cluster after it is inactive for this time in minutes.`) + cmd.Flags().IntVar(&editReq.AutoterminationMinutes, "autotermination-minutes", editReq.AutoterminationMinutes, `Automatically terminates the cluster after it is inactive for this time in minutes.`) // TODO: complex arg: aws_attributes // TODO: complex arg: azure_attributes // TODO: complex arg: cluster_log_conf - editCmd.Flags().StringVar(&editReq.ClusterName, "cluster-name", editReq.ClusterName, `Cluster name requested by the user.`) - editCmd.Flags().Var(&editReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API 
request.`) + cmd.Flags().StringVar(&editReq.ClusterName, "cluster-name", editReq.ClusterName, `Cluster name requested by the user.`) + cmd.Flags().Var(&editReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.`) // TODO: map via StringToStringVar: custom_tags - editCmd.Flags().Var(&editReq.DataSecurityMode, "data-security-mode", `This describes an enum.`) + cmd.Flags().Var(&editReq.DataSecurityMode, "data-security-mode", `This describes an enum.`) // TODO: complex arg: docker_image - editCmd.Flags().StringVar(&editReq.DriverInstancePoolId, "driver-instance-pool-id", editReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) - editCmd.Flags().StringVar(&editReq.DriverNodeTypeId, "driver-node-type-id", editReq.DriverNodeTypeId, `The node type of the Spark driver.`) - editCmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) - editCmd.Flags().BoolVar(&editReq.EnableLocalDiskEncryption, "enable-local-disk-encryption", editReq.EnableLocalDiskEncryption, `Whether to enable LUKS on cluster VMs' local disks.`) + cmd.Flags().StringVar(&editReq.DriverInstancePoolId, "driver-instance-pool-id", editReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) + cmd.Flags().StringVar(&editReq.DriverNodeTypeId, "driver-node-type-id", editReq.DriverNodeTypeId, `The node type of the Spark driver.`) + cmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) + cmd.Flags().BoolVar(&editReq.EnableLocalDiskEncryption, "enable-local-disk-encryption", editReq.EnableLocalDiskEncryption, `Whether to enable LUKS on cluster VMs' local disks.`) // TODO: complex arg: gcp_attributes // TODO: array: init_scripts - editCmd.Flags().StringVar(&editReq.InstancePoolId, "instance-pool-id", editReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) - editCmd.Flags().StringVar(&editReq.NodeTypeId, "node-type-id", editReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) - editCmd.Flags().IntVar(&editReq.NumWorkers, "num-workers", editReq.NumWorkers, `Number of worker nodes that this cluster should have.`) - editCmd.Flags().StringVar(&editReq.PolicyId, "policy-id", editReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) - editCmd.Flags().Var(&editReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) - editCmd.Flags().StringVar(&editReq.SingleUserName, "single-user-name", editReq.SingleUserName, `Single user name if data_security_mode is SINGLE_USER.`) + cmd.Flags().StringVar(&editReq.InstancePoolId, "instance-pool-id", editReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) + cmd.Flags().StringVar(&editReq.NodeTypeId, "node-type-id", editReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) + cmd.Flags().IntVar(&editReq.NumWorkers, 
"num-workers", editReq.NumWorkers, `Number of worker nodes that this cluster should have.`) + cmd.Flags().StringVar(&editReq.PolicyId, "policy-id", editReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) + cmd.Flags().Var(&editReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) + cmd.Flags().StringVar(&editReq.SingleUserName, "single-user-name", editReq.SingleUserName, `Single user name if data_security_mode is SINGLE_USER.`) // TODO: map via StringToStringVar: spark_conf // TODO: map via StringToStringVar: spark_env_vars // TODO: array: ssh_public_keys // TODO: complex arg: workload_type -} - -var editCmd = &cobra.Command{ - Use: "edit CLUSTER_ID SPARK_VERSION", - Short: `Update cluster configuration.`, - Long: `Update cluster configuration. + cmd.Use = "edit CLUSTER_ID SPARK_VERSION" + cmd.Short = `Update cluster configuration.` + cmd.Long = `Update cluster configuration. Updates the configuration of a cluster to match the provided attributes and size. A cluster can be updated if it is in a RUNNING or TERMINATED state. @@ -345,18 +430,20 @@ var editCmd = &cobra.Command{ new attributes will take effect. Any attempt to update a cluster in any other state will be rejected with an INVALID_STATE error code. - Clusters created by the Databricks Jobs service cannot be edited.`, + Clusters created by the Databricks Jobs service cannot be edited.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -387,42 +474,63 @@ var editCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start events command -var eventsReq compute.GetEvents -var eventsJson flags.JsonFlag -func init() { - Cmd.AddCommand(eventsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var eventsOverrides []func( + *cobra.Command, + *compute.GetEvents, +) + +func newEvents() *cobra.Command { + cmd := &cobra.Command{} + + var eventsReq compute.GetEvents + var eventsJson flags.JsonFlag + // TODO: short flags - eventsCmd.Flags().Var(&eventsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&eventsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - eventsCmd.Flags().Int64Var(&eventsReq.EndTime, "end-time", eventsReq.EndTime, `The end time in epoch milliseconds.`) + cmd.Flags().Int64Var(&eventsReq.EndTime, "end-time", eventsReq.EndTime, `The end time in epoch milliseconds.`) // TODO: array: event_types - eventsCmd.Flags().Int64Var(&eventsReq.Limit, "limit", eventsReq.Limit, `The maximum number of events to include in a page of events.`) - eventsCmd.Flags().Int64Var(&eventsReq.Offset, "offset", eventsReq.Offset, `The offset in the result set.`) - eventsCmd.Flags().Var(&eventsReq.Order, "order", `The order to list events in; either "ASC" or "DESC".`) - eventsCmd.Flags().Int64Var(&eventsReq.StartTime, "start-time", eventsReq.StartTime, `The start time in epoch milliseconds.`) + cmd.Flags().Int64Var(&eventsReq.Limit, "limit", eventsReq.Limit, `The maximum number of events to include in a page of events.`) + cmd.Flags().Int64Var(&eventsReq.Offset, "offset", eventsReq.Offset, `The offset in the result set.`) + cmd.Flags().Var(&eventsReq.Order, "order", `The order to list events in; either "ASC" or "DESC".`) + cmd.Flags().Int64Var(&eventsReq.StartTime, "start-time", eventsReq.StartTime, `The start time in epoch milliseconds.`) -} - -var eventsCmd = &cobra.Command{ - Use: "events CLUSTER_ID", - Short: `List cluster activity events.`, - Long: `List cluster activity events. + cmd.Use = "events CLUSTER_ID" + cmd.Short = `List cluster activity events.` + cmd.Long = `List cluster activity events. Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more events to read, the response includes all the - nparameters necessary to request the next page of events.`, + nparameters necessary to request the next page of events.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -457,38 +565,58 @@ var eventsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range eventsOverrides { + fn(cmd, &eventsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEvents()) + }) } // start get command -var getReq compute.GetClusterRequest -var getSkipWait bool -var getTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *compute.GetClusterRequest, +) -func init() { - Cmd.AddCommand(getCmd) +func newGet() *cobra.Command { + cmd := &cobra.Command{} - getCmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) - getCmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var getReq compute.GetClusterRequest + + var getSkipWait bool + var getTimeout time.Duration + + cmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get CLUSTER_ID", - Short: `Get cluster info.`, - Long: `Get cluster info. + cmd.Use = "get CLUSTER_ID" + cmd.Short = `Get cluster info.` + cmd.Long = `Get cluster info. Retrieves the information for a cluster given its identifier. Clusters can be - described while they are running, or up to 60 days after they are terminated.`, + described while they are running, or up to 60 days after they are terminated.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -516,29 +644,196 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *compute.GetClusterPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq compute.GetClusterPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels CLUSTER_ID" + cmd.Short = `Get cluster permission levels.` + cmd.Long = `Get cluster permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster for which to get or manage permissions") + } + getPermissionLevelsReq.ClusterId = args[0] + + response, err := w.Clusters.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *compute.GetClusterPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq compute.GetClusterPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions CLUSTER_ID" + cmd.Short = `Get cluster permissions.` + cmd.Long = `Get cluster permissions. + + Gets the permissions of a cluster. Clusters can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster for which to get or manage permissions") + } + getPermissionsReq.ClusterId = args[0] + + response, err := w.Clusters.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start list command -var listReq compute.ListClustersRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
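// NOTE (editor's illustration, not part of the generated diff): list commands
// render their response through cmdio, so a curated overrides.go can swap the
// default JSON output for a compact table by setting the "template" annotation,
// exactly as the cluster-policies overrides.go earlier in this patch does. The
// function name is hypothetical and the field names assume the SDK's
// compute.ClusterDetails type:

func exampleListOverride(listCmd *cobra.Command, _ *compute.ListClustersRequest) {
	listCmd.Annotations["template"] = cmdio.Heredoc(`
	{{range .}}{{.ClusterId | green}}	{{.ClusterName}}	{{.State}}
	{{end}}`)
}

func init() {
	listOverrides = append(listOverrides, exampleListOverride)
}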
+var listOverrides []func( + *cobra.Command, + *compute.ListClustersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq compute.ListClustersRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.CanUseClient, "can-use-client", listReq.CanUseClient, `Filter clusters based on what type of client it can be used for.`) + cmd.Flags().StringVar(&listReq.CanUseClient, "can-use-client", listReq.CanUseClient, `Filter clusters based on what type of client it can be used for.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List all clusters.`, - Long: `List all clusters. + cmd.Use = "list" + cmd.Short = `List all clusters.` + cmd.Long = `List all clusters. Return information about all pinned clusters, active clusters, up to 200 of the most recently terminated all-purpose clusters in the past 30 days, and up @@ -548,18 +843,20 @@ var listCmd = &cobra.Command{ all-purpose clusters in the past 30 days, and 50 terminated job clusters in the past 30 days, then this API returns the 1 pinned cluster, 4 active clusters, all 45 terminated all-purpose clusters, and the 30 most recently - terminated job clusters.`, + terminated job clusters.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -576,30 +873,48 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start list-node-types command -func init() { - Cmd.AddCommand(listNodeTypesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listNodeTypesOverrides []func( + *cobra.Command, +) -} +func newListNodeTypes() *cobra.Command { + cmd := &cobra.Command{} -var listNodeTypesCmd = &cobra.Command{ - Use: "list-node-types", - Short: `List node types.`, - Long: `List node types. + cmd.Use = "list-node-types" + cmd.Short = `List node types.` + cmd.Long = `List node types. Returns a list of supported Spark node types. 
These node types can be used to - launch a cluster.`, + launch a cluster.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Clusters.ListNodeTypes(ctx) @@ -607,30 +922,48 @@ var listNodeTypesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listNodeTypesOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListNodeTypes()) + }) } // start list-zones command -func init() { - Cmd.AddCommand(listZonesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listZonesOverrides []func( + *cobra.Command, +) -} +func newListZones() *cobra.Command { + cmd := &cobra.Command{} -var listZonesCmd = &cobra.Command{ - Use: "list-zones", - Short: `List availability zones.`, - Long: `List availability zones. + cmd.Use = "list-zones" + cmd.Short = `List availability zones.` + cmd.Long = `List availability zones. Returns a list of availability zones where clusters can be created in (For - example, us-west-2a). These zones can be used to launch a cluster.`, + example, us-west-2a). These zones can be used to launch a cluster.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Clusters.ListZones(ctx) @@ -638,38 +971,59 @@ var listZonesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listZonesOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListZones()) + }) } // start permanent-delete command -var permanentDeleteReq compute.PermanentDeleteCluster -var permanentDeleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(permanentDeleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
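// NOTE (editor's illustration, not part of the generated diff): overrides are
// not limited to flags and output templates. Because they receive the fully
// built command, they can also wrap RunE, which is assigned before the
// overrides loop runs. A hypothetical curated override could gate the
// destructive permanent-delete behind an explicit confirmation flag (flag and
// function names invented for this sketch):

func examplePermanentDeleteOverride(cmd *cobra.Command, _ *compute.PermanentDeleteCluster) {
	var confirmed bool
	cmd.Flags().BoolVar(&confirmed, "i-know-what-i-am-doing", false, `confirm permanent deletion`)
	runE := cmd.RunE
	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		if !confirmed {
			return fmt.Errorf("pass --i-know-what-i-am-doing to permanently delete a cluster")
		}
		return runE(cmd, args)
	}
}

func init() {
	permanentDeleteOverrides = append(permanentDeleteOverrides, examplePermanentDeleteOverride)
}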
+var permanentDeleteOverrides []func( + *cobra.Command, + *compute.PermanentDeleteCluster, +) + +func newPermanentDelete() *cobra.Command { + cmd := &cobra.Command{} + + var permanentDeleteReq compute.PermanentDeleteCluster + var permanentDeleteJson flags.JsonFlag + // TODO: short flags - permanentDeleteCmd.Flags().Var(&permanentDeleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&permanentDeleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var permanentDeleteCmd = &cobra.Command{ - Use: "permanent-delete CLUSTER_ID", - Short: `Permanently delete cluster.`, - Long: `Permanently delete cluster. + cmd.Use = "permanent-delete CLUSTER_ID" + cmd.Short = `Permanently delete cluster.` + cmd.Long = `Permanently delete cluster. Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously removed. In addition, users will no longer see permanently deleted clusters in the cluster list, and API users can no longer perform any action on permanently - deleted clusters.`, + deleted clusters.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -704,35 +1058,56 @@ var permanentDeleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range permanentDeleteOverrides { + fn(cmd, &permanentDeleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPermanentDelete()) + }) } // start pin command -var pinReq compute.PinCluster -var pinJson flags.JsonFlag -func init() { - Cmd.AddCommand(pinCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var pinOverrides []func( + *cobra.Command, + *compute.PinCluster, +) + +func newPin() *cobra.Command { + cmd := &cobra.Command{} + + var pinReq compute.PinCluster + var pinJson flags.JsonFlag + // TODO: short flags - pinCmd.Flags().Var(&pinJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&pinJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var pinCmd = &cobra.Command{ - Use: "pin CLUSTER_ID", - Short: `Pin cluster.`, - Long: `Pin cluster. + cmd.Use = "pin CLUSTER_ID" + cmd.Short = `Pin cluster.` + cmd.Long = `Pin cluster. Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a cluster that is already pinned will have no - effect. This API can only be called by workspace admins.`, + effect. 
This API can only be called by workspace admins.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -767,43 +1142,63 @@ var pinCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range pinOverrides { + fn(cmd, &pinReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPin()) + }) } // start resize command -var resizeReq compute.ResizeCluster -var resizeJson flags.JsonFlag -var resizeSkipWait bool -var resizeTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resizeOverrides []func( + *cobra.Command, + *compute.ResizeCluster, +) -func init() { - Cmd.AddCommand(resizeCmd) +func newResize() *cobra.Command { + cmd := &cobra.Command{} - resizeCmd.Flags().BoolVar(&resizeSkipWait, "no-wait", resizeSkipWait, `do not wait to reach RUNNING state`) - resizeCmd.Flags().DurationVar(&resizeTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var resizeReq compute.ResizeCluster + var resizeJson flags.JsonFlag + + var resizeSkipWait bool + var resizeTimeout time.Duration + + cmd.Flags().BoolVar(&resizeSkipWait, "no-wait", resizeSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&resizeTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - resizeCmd.Flags().Var(&resizeJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&resizeJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: autoscale - resizeCmd.Flags().IntVar(&resizeReq.NumWorkers, "num-workers", resizeReq.NumWorkers, `Number of worker nodes that this cluster should have.`) + cmd.Flags().IntVar(&resizeReq.NumWorkers, "num-workers", resizeReq.NumWorkers, `Number of worker nodes that this cluster should have.`) -} - -var resizeCmd = &cobra.Command{ - Use: "resize CLUSTER_ID", - Short: `Resize cluster.`, - Long: `Resize cluster. + cmd.Use = "resize CLUSTER_ID" + cmd.Short = `Resize cluster.` + cmd.Long = `Resize cluster. Resizes a cluster to have a desired number of workers. This will fail unless - the cluster is in a RUNNING state.`, + the cluster is in a RUNNING state.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -850,42 +1245,62 @@ var resizeCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resizeOverrides { + fn(cmd, &resizeReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newResize()) + }) } // start restart command -var restartReq compute.RestartCluster -var restartJson flags.JsonFlag -var restartSkipWait bool -var restartTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restartOverrides []func( + *cobra.Command, + *compute.RestartCluster, +) -func init() { - Cmd.AddCommand(restartCmd) +func newRestart() *cobra.Command { + cmd := &cobra.Command{} - restartCmd.Flags().BoolVar(&restartSkipWait, "no-wait", restartSkipWait, `do not wait to reach RUNNING state`) - restartCmd.Flags().DurationVar(&restartTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var restartReq compute.RestartCluster + var restartJson flags.JsonFlag + + var restartSkipWait bool + var restartTimeout time.Duration + + cmd.Flags().BoolVar(&restartSkipWait, "no-wait", restartSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&restartTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - restartCmd.Flags().Var(&restartJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&restartJson, "json", `either inline JSON string or @path/to/file.json with request body`) - restartCmd.Flags().StringVar(&restartReq.RestartUser, "restart-user", restartReq.RestartUser, `.`) + cmd.Flags().StringVar(&restartReq.RestartUser, "restart-user", restartReq.RestartUser, `.`) -} - -var restartCmd = &cobra.Command{ - Use: "restart CLUSTER_ID", - Short: `Restart cluster.`, - Long: `Restart cluster. + cmd.Use = "restart CLUSTER_ID" + cmd.Short = `Restart cluster.` + cmd.Long = `Restart cluster. Restarts a Spark cluster with the supplied ID. If the cluster is not currently - in a RUNNING state, nothing will happen.`, + in a RUNNING state, nothing will happen.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -932,30 +1347,132 @@ var restartCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range restartOverrides { + fn(cmd, &restartReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestart()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
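// NOTE (editor's illustration, not part of the generated diff): the permission
// commands below leave cmd.Args unset and fall back to an interactive
// drop-down in RunE when CLUSTER_ID is omitted. In non-interactive
// environments, a curated override can make the argument mandatory instead, so
// the command fails fast rather than waiting for input. The function name is
// hypothetical:

func exampleSetPermissionsOverride(cmd *cobra.Command, _ *compute.ClusterPermissionsRequest) {
	// Requiring exactly one positional argument means the len(args) == 0
	// prompt branch in RunE is never reached.
	cmd.Args = cobra.ExactArgs(1)
}

func init() {
	setPermissionsOverrides = append(setPermissionsOverrides, exampleSetPermissionsOverride)
}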
+var setPermissionsOverrides []func( + *cobra.Command, + *compute.ClusterPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq compute.ClusterPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions CLUSTER_ID" + cmd.Short = `Set cluster permissions.` + cmd.Long = `Set cluster permissions. + + Sets permissions on a cluster. Clusters can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster for which to get or manage permissions") + } + setPermissionsReq.ClusterId = args[0] + + response, err := w.Clusters.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start spark-versions command -func init() { - Cmd.AddCommand(sparkVersionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var sparkVersionsOverrides []func( + *cobra.Command, +) -} +func newSparkVersions() *cobra.Command { + cmd := &cobra.Command{} -var sparkVersionsCmd = &cobra.Command{ - Use: "spark-versions", - Short: `List available Spark versions.`, - Long: `List available Spark versions. + cmd.Use = "spark-versions" + cmd.Short = `List available Spark versions.` + cmd.Long = `List available Spark versions. Returns the list of available Spark versions. 
These versions can be used to - launch a cluster.`, + launch a cluster.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Clusters.SparkVersions(ctx) @@ -963,33 +1480,52 @@ var sparkVersionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range sparkVersionsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSparkVersions()) + }) } // start start command -var startReq compute.StartCluster -var startJson flags.JsonFlag -var startSkipWait bool -var startTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var startOverrides []func( + *cobra.Command, + *compute.StartCluster, +) -func init() { - Cmd.AddCommand(startCmd) +func newStart() *cobra.Command { + cmd := &cobra.Command{} - startCmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach RUNNING state`) - startCmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var startReq compute.StartCluster + var startJson flags.JsonFlag + + var startSkipWait bool + var startTimeout time.Duration + + cmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - startCmd.Flags().Var(&startJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&startJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var startCmd = &cobra.Command{ - Use: "start CLUSTER_ID", - Short: `Start terminated cluster.`, - Long: `Start terminated cluster. + cmd.Use = "start CLUSTER_ID" + cmd.Short = `Start terminated cluster.` + cmd.Long = `Start terminated cluster. Starts a terminated Spark cluster with the supplied ID. This works similar to createCluster except: @@ -998,11 +1534,12 @@ var startCmd = &cobra.Command{ with the last specified cluster size. * If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum number of nodes. * If the cluster is not currently in a TERMINATED state, nothing will - happen. * Clusters launched to run a job cannot be started.`, + happen. 
* Clusters launched to run a job cannot be started.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1049,35 +1586,56 @@ var startCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range startOverrides { + fn(cmd, &startReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStart()) + }) } // start unpin command -var unpinReq compute.UnpinCluster -var unpinJson flags.JsonFlag -func init() { - Cmd.AddCommand(unpinCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var unpinOverrides []func( + *cobra.Command, + *compute.UnpinCluster, +) + +func newUnpin() *cobra.Command { + cmd := &cobra.Command{} + + var unpinReq compute.UnpinCluster + var unpinJson flags.JsonFlag + // TODO: short flags - unpinCmd.Flags().Var(&unpinJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&unpinJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var unpinCmd = &cobra.Command{ - Use: "unpin CLUSTER_ID", - Short: `Unpin cluster.`, - Long: `Unpin cluster. + cmd.Use = "unpin CLUSTER_ID" + cmd.Short = `Unpin cluster.` + cmd.Long = `Unpin cluster. Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. Unpinning a cluster that is not pinned will have no effect. - This API can only be called by workspace admins.`, + This API can only be called by workspace admins.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1112,10 +1670,108 @@ var unpinCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range unpinOverrides { + fn(cmd, &unpinReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUnpin()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updatePermissionsOverrides []func( + *cobra.Command, + *compute.ClusterPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq compute.ClusterPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions CLUSTER_ID" + cmd.Short = `Update cluster permissions.` + cmd.Long = `Update cluster permissions. + + Updates the permissions on a cluster. Clusters can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster for which to get or manage permissions") + } + updatePermissionsReq.ClusterId = args[0] + + response, err := w.Clusters.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service Clusters diff --git a/cmd/workspace/clusters/overrides.go b/cmd/workspace/clusters/overrides.go index 1e8818952..ab32a4cd8 100644 --- a/cmd/workspace/clusters/overrides.go +++ b/cmd/workspace/clusters/overrides.go @@ -1,19 +1,33 @@ package clusters -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, _ *compute.ListClustersRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{header "State"}} {{range .}}{{.ClusterId | green}} {{.ClusterName | cyan}} {{if eq .State "RUNNING"}}{{green "%s" .State}}{{else if eq .State "TERMINATED"}}{{red "%s" .State}}{{else}}{{blue "%s" .State}}{{end}} {{end}}`) +} +func listNodeTypesOverride(listNodeTypesCmd *cobra.Command) { listNodeTypesCmd.Annotations["template"] = cmdio.Heredoc(` {{range .NodeTypes}}{{.NodeTypeId | green}} {{.NumCores}} {{.MemoryMb}} {{.Category | blue}} {{end}}`) +} +func sparkVersionsOverride(sparkVersionsCmd *cobra.Command) { sparkVersionsCmd.Annotations["template"] = cmdio.Heredoc(` {{range .Versions}}{{.Key | green}} {{.Name}} {{end}} `) } + +func init() { + listOverrides = append(listOverrides, listOverride) + listNodeTypesOverrides = append(listNodeTypesOverrides, listNodeTypesOverride) + sparkVersionsOverrides = append(sparkVersionsOverrides, sparkVersionsOverride) +} diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index 68ce3ef06..495d8066d 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -3,14 +3,15 @@ package workspace import ( - "github.com/databricks/cli/cmd/root" - alerts "github.com/databricks/cli/cmd/workspace/alerts" + artifact_allowlists "github.com/databricks/cli/cmd/workspace/artifact-allowlists" catalogs "github.com/databricks/cli/cmd/workspace/catalogs" + clean_rooms "github.com/databricks/cli/cmd/workspace/clean-rooms" cluster_policies "github.com/databricks/cli/cmd/workspace/cluster-policies" clusters "github.com/databricks/cli/cmd/workspace/clusters" connections "github.com/databricks/cli/cmd/workspace/connections" current_user "github.com/databricks/cli/cmd/workspace/current-user" + dashboard_widgets "github.com/databricks/cli/cmd/workspace/dashboard-widgets" dashboards "github.com/databricks/cli/cmd/workspace/dashboards" data_sources "github.com/databricks/cli/cmd/workspace/data-sources" experiments "github.com/databricks/cli/cmd/workspace/experiments" @@ -27,14 +28,17 @@ import ( libraries "github.com/databricks/cli/cmd/workspace/libraries" metastores "github.com/databricks/cli/cmd/workspace/metastores" model_registry "github.com/databricks/cli/cmd/workspace/model-registry" + model_versions "github.com/databricks/cli/cmd/workspace/model-versions" permissions "github.com/databricks/cli/cmd/workspace/permissions" pipelines "github.com/databricks/cli/cmd/workspace/pipelines" policy_families "github.com/databricks/cli/cmd/workspace/policy-families" providers "github.com/databricks/cli/cmd/workspace/providers" queries "github.com/databricks/cli/cmd/workspace/queries" query_history "github.com/databricks/cli/cmd/workspace/query-history" + query_visualizations 
"github.com/databricks/cli/cmd/workspace/query-visualizations" recipient_activation "github.com/databricks/cli/cmd/workspace/recipient-activation" recipients "github.com/databricks/cli/cmd/workspace/recipients" + registered_models "github.com/databricks/cli/cmd/workspace/registered-models" repos "github.com/databricks/cli/cmd/workspace/repos" schemas "github.com/databricks/cli/cmd/workspace/schemas" secrets "github.com/databricks/cli/cmd/workspace/secrets" @@ -53,106 +57,66 @@ import ( workspace "github.com/databricks/cli/cmd/workspace/workspace" workspace_bindings "github.com/databricks/cli/cmd/workspace/workspace-bindings" workspace_conf "github.com/databricks/cli/cmd/workspace/workspace-conf" + "github.com/spf13/cobra" ) -func init() { - root.RootCmd.AddCommand(alerts.Cmd) - root.RootCmd.AddCommand(catalogs.Cmd) - root.RootCmd.AddCommand(cluster_policies.Cmd) - root.RootCmd.AddCommand(clusters.Cmd) - root.RootCmd.AddCommand(connections.Cmd) - root.RootCmd.AddCommand(current_user.Cmd) - root.RootCmd.AddCommand(dashboards.Cmd) - root.RootCmd.AddCommand(data_sources.Cmd) - root.RootCmd.AddCommand(experiments.Cmd) - root.RootCmd.AddCommand(external_locations.Cmd) - root.RootCmd.AddCommand(functions.Cmd) - root.RootCmd.AddCommand(git_credentials.Cmd) - root.RootCmd.AddCommand(global_init_scripts.Cmd) - root.RootCmd.AddCommand(grants.Cmd) - root.RootCmd.AddCommand(groups.Cmd) - root.RootCmd.AddCommand(instance_pools.Cmd) - root.RootCmd.AddCommand(instance_profiles.Cmd) - root.RootCmd.AddCommand(ip_access_lists.Cmd) - root.RootCmd.AddCommand(jobs.Cmd) - root.RootCmd.AddCommand(libraries.Cmd) - root.RootCmd.AddCommand(metastores.Cmd) - root.RootCmd.AddCommand(model_registry.Cmd) - root.RootCmd.AddCommand(permissions.Cmd) - root.RootCmd.AddCommand(pipelines.Cmd) - root.RootCmd.AddCommand(policy_families.Cmd) - root.RootCmd.AddCommand(providers.Cmd) - root.RootCmd.AddCommand(queries.Cmd) - root.RootCmd.AddCommand(query_history.Cmd) - root.RootCmd.AddCommand(recipient_activation.Cmd) - root.RootCmd.AddCommand(recipients.Cmd) - root.RootCmd.AddCommand(repos.Cmd) - root.RootCmd.AddCommand(schemas.Cmd) - root.RootCmd.AddCommand(secrets.Cmd) - root.RootCmd.AddCommand(service_principals.Cmd) - root.RootCmd.AddCommand(serving_endpoints.Cmd) - root.RootCmd.AddCommand(shares.Cmd) - root.RootCmd.AddCommand(storage_credentials.Cmd) - root.RootCmd.AddCommand(system_schemas.Cmd) - root.RootCmd.AddCommand(table_constraints.Cmd) - root.RootCmd.AddCommand(tables.Cmd) - root.RootCmd.AddCommand(token_management.Cmd) - root.RootCmd.AddCommand(tokens.Cmd) - root.RootCmd.AddCommand(users.Cmd) - root.RootCmd.AddCommand(volumes.Cmd) - root.RootCmd.AddCommand(warehouses.Cmd) - root.RootCmd.AddCommand(workspace.Cmd) - root.RootCmd.AddCommand(workspace_bindings.Cmd) - root.RootCmd.AddCommand(workspace_conf.Cmd) +func All() []*cobra.Command { + var out []*cobra.Command - // Register commands with groups - alerts.Cmd.GroupID = "sql" - catalogs.Cmd.GroupID = "catalog" - cluster_policies.Cmd.GroupID = "compute" - clusters.Cmd.GroupID = "compute" - connections.Cmd.GroupID = "catalog" - current_user.Cmd.GroupID = "iam" - dashboards.Cmd.GroupID = "sql" - data_sources.Cmd.GroupID = "sql" - experiments.Cmd.GroupID = "ml" - external_locations.Cmd.GroupID = "catalog" - functions.Cmd.GroupID = "catalog" - git_credentials.Cmd.GroupID = "workspace" - global_init_scripts.Cmd.GroupID = "compute" - grants.Cmd.GroupID = "catalog" - groups.Cmd.GroupID = "iam" - instance_pools.Cmd.GroupID = "compute" - instance_profiles.Cmd.GroupID = 
"compute" - ip_access_lists.Cmd.GroupID = "settings" - jobs.Cmd.GroupID = "jobs" - libraries.Cmd.GroupID = "compute" - metastores.Cmd.GroupID = "catalog" - model_registry.Cmd.GroupID = "ml" - permissions.Cmd.GroupID = "iam" - pipelines.Cmd.GroupID = "pipelines" - policy_families.Cmd.GroupID = "compute" - providers.Cmd.GroupID = "sharing" - queries.Cmd.GroupID = "sql" - query_history.Cmd.GroupID = "sql" - recipient_activation.Cmd.GroupID = "sharing" - recipients.Cmd.GroupID = "sharing" - repos.Cmd.GroupID = "workspace" - schemas.Cmd.GroupID = "catalog" - secrets.Cmd.GroupID = "workspace" - service_principals.Cmd.GroupID = "iam" - serving_endpoints.Cmd.GroupID = "serving" - shares.Cmd.GroupID = "sharing" - storage_credentials.Cmd.GroupID = "catalog" - system_schemas.Cmd.GroupID = "catalog" - table_constraints.Cmd.GroupID = "catalog" - tables.Cmd.GroupID = "catalog" - token_management.Cmd.GroupID = "settings" - tokens.Cmd.GroupID = "settings" - users.Cmd.GroupID = "iam" - volumes.Cmd.GroupID = "catalog" - warehouses.Cmd.GroupID = "sql" - workspace.Cmd.GroupID = "workspace" - workspace_bindings.Cmd.GroupID = "catalog" - workspace_conf.Cmd.GroupID = "settings" + out = append(out, alerts.New()) + out = append(out, artifact_allowlists.New()) + out = append(out, catalogs.New()) + out = append(out, clean_rooms.New()) + out = append(out, cluster_policies.New()) + out = append(out, clusters.New()) + out = append(out, connections.New()) + out = append(out, current_user.New()) + out = append(out, dashboard_widgets.New()) + out = append(out, dashboards.New()) + out = append(out, data_sources.New()) + out = append(out, experiments.New()) + out = append(out, external_locations.New()) + out = append(out, functions.New()) + out = append(out, git_credentials.New()) + out = append(out, global_init_scripts.New()) + out = append(out, grants.New()) + out = append(out, groups.New()) + out = append(out, instance_pools.New()) + out = append(out, instance_profiles.New()) + out = append(out, ip_access_lists.New()) + out = append(out, jobs.New()) + out = append(out, libraries.New()) + out = append(out, metastores.New()) + out = append(out, model_registry.New()) + out = append(out, model_versions.New()) + out = append(out, permissions.New()) + out = append(out, pipelines.New()) + out = append(out, policy_families.New()) + out = append(out, providers.New()) + out = append(out, queries.New()) + out = append(out, query_history.New()) + out = append(out, query_visualizations.New()) + out = append(out, recipient_activation.New()) + out = append(out, recipients.New()) + out = append(out, registered_models.New()) + out = append(out, repos.New()) + out = append(out, schemas.New()) + out = append(out, secrets.New()) + out = append(out, service_principals.New()) + out = append(out, serving_endpoints.New()) + out = append(out, shares.New()) + out = append(out, storage_credentials.New()) + out = append(out, system_schemas.New()) + out = append(out, table_constraints.New()) + out = append(out, tables.New()) + out = append(out, token_management.New()) + out = append(out, tokens.New()) + out = append(out, users.New()) + out = append(out, volumes.New()) + out = append(out, warehouses.New()) + out = append(out, workspace.New()) + out = append(out, workspace_bindings.New()) + out = append(out, workspace_conf.New()) + return out } diff --git a/cmd/workspace/connections/connections.go b/cmd/workspace/connections/connections.go index 146fdba9d..c25825c9d 100755 --- a/cmd/workspace/connections/connections.go +++ 
b/cmd/workspace/connections/connections.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "connections", - Short: `Connections allow for creating a connection to an external data source.`, - Long: `Connections allow for creating a connection to an external data source. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "connections", + Short: `Connections allow for creating a connection to an external data source.`, + Long: `Connections allow for creating a connection to an external data source. A connection is an abstraction of an external data source that can be connected from Databricks Compute. Creating a connection object is the first @@ -26,44 +31,57 @@ var Cmd = &cobra.Command{ may create different types of connections with each connection having a unique set of configuration options to support credential management and other settings.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } - // This service is being previewed; hide from help output. - Hidden: true, + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateConnection -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateConnection, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateConnection + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) - createCmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of current owner of the connection.`) - // TODO: map via StringToStringVar: properties_kvpairs - createCmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `If the connection is read only.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of current owner of the connection.`) + // TODO: map via StringToStringVar: properties + cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `If the connection is read only.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a connection.`, - Long: `Create a connection. + cmd.Use = "create" + cmd.Short = `Create a connection.` + cmd.Long = `Create a connection. Creates a new connection Creates a new connection to an external data source. 
It allows users to specify connection details and configurations for interaction with the - external server.`, + external server.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -81,31 +99,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteConnectionRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteConnectionRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteConnectionRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME_ARG", - Short: `Delete a connection.`, - Long: `Delete a connection. + cmd.Use = "delete NAME_ARG" + cmd.Short = `Delete a connection.` + cmd.Long = `Delete a connection. - Deletes the connection that matches the supplied name.`, + Deletes the connection that matches the supplied name.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -133,31 +172,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetConnectionRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetConnectionRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetConnectionRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME_ARG", - Short: `Get a connection.`, - Long: `Get a connection. + cmd.Use = "get NAME_ARG" + cmd.Short = `Get a connection.` + cmd.Long = `Get a connection. 
- Gets a connection from it's name.`, + Gets a connection from it's name.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -185,29 +245,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List connections.`, - Long: `List connections. + cmd.Use = "list" + cmd.Short = `List connections.` + cmd.Long = `List connections. - List all connections.`, + List all connections.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Connections.ListAll(ctx) @@ -215,33 +293,54 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateConnection -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateConnection, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateConnection + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Update a connection.`, - Long: `Update a connection. + cmd.Use = "update" + cmd.Short = `Update a connection.` + cmd.Long = `Update a connection. 
- Updates the connection that matches the supplied name.`, + Updates the connection that matches the supplied name.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -259,10 +358,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Connections diff --git a/cmd/workspace/current-user/current-user.go b/cmd/workspace/current-user/current-user.go index 80e539ec9..cb18e71d2 100755 --- a/cmd/workspace/current-user/current-user.go +++ b/cmd/workspace/current-user/current-user.go @@ -8,33 +8,51 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "current-user", - Short: `This API allows retrieving information about currently authenticated user or service principal.`, - Long: `This API allows retrieving information about currently authenticated user or +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "current-user", + Short: `This API allows retrieving information about currently authenticated user or service principal.`, + Long: `This API allows retrieving information about currently authenticated user or service principal.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start me command -func init() { - Cmd.AddCommand(meCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var meOverrides []func( + *cobra.Command, +) -} +func newMe() *cobra.Command { + cmd := &cobra.Command{} -var meCmd = &cobra.Command{ - Use: "me", - Short: `Get current user info.`, - Long: `Get current user info. + cmd.Use = "me" + cmd.Short = `Get current user info.` + cmd.Long = `Get current user info. - Get details about the current method caller's identity.`, + Get details about the current method caller's identity.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.CurrentUser.Me(ctx) @@ -42,10 +60,24 @@ var meCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range meOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newMe()) + }) } // end service CurrentUser diff --git a/cmd/workspace/dashboard-widgets/dashboard-widgets.go b/cmd/workspace/dashboard-widgets/dashboard-widgets.go new file mode 100755 index 000000000..63e8d120c --- /dev/null +++ b/cmd/workspace/dashboard-widgets/dashboard-widgets.go @@ -0,0 +1,228 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package dashboard_widgets + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "dashboard-widgets", + Short: `This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace.`, + Long: `This is an evolving API that facilitates the addition and removal of widgets + from existing dashboards within the Databricks Workspace. Data structures may + change over time.`, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.CreateWidget, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.CreateWidget + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "create" + cmd.Short = `Add widget to a dashboard.` + cmd.Long = `Add widget to a dashboard.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createJson.Unmarshal(&createReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.DashboardWidgets.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sql.DeleteDashboardWidgetRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteDashboardWidgetRequest + + // TODO: short flags + + cmd.Use = "delete ID" + cmd.Short = `Remove widget.` + cmd.Long = `Remove widget.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.Id = args[0] + + err = w.DashboardWidgets.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *sql.CreateWidget, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sql.CreateWidget + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "update" + cmd.Short = `Update existing widget.` + cmd.Long = `Update existing widget.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.DashboardWidgets.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service DashboardWidgets diff --git a/cmd/workspace/dashboards/dashboards.go b/cmd/workspace/dashboards/dashboards.go index b18ddff8c..2335ee28e 100755 --- a/cmd/workspace/dashboards/dashboards.go +++ b/cmd/workspace/dashboards/dashboards.go @@ -12,51 +12,68 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "dashboards", - Short: `In general, there is little need to modify dashboards using the API.`, - Long: `In general, there is little need to modify dashboards using the API. However, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "dashboards", + Short: `In general, there is little need to modify dashboards using the API.`, + Long: `In general, there is little need to modify dashboards using the API. However, it can be useful to use dashboard objects to look-up a collection of related query IDs. The API can also be used to duplicate multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it to create a new one. Dashboards can be scheduled using the sql_task type of the Jobs API, e.g. :method:jobs/create.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sql.CreateDashboardRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *sql.CreateDashboardRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.CreateDashboardRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.IsFavorite, "is-favorite", createReq.IsFavorite, `Indicates whether this query object should appear in the current user's favorites list.`) - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `The title of this dashboard that appears in list views and at the top of the dashboard page.`) - createCmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the dashboard.`) - // TODO: array: tags + cmd.Use = "create NAME" + cmd.Short = `Create a dashboard object.` + cmd.Long = `Create a dashboard object.` -} + cmd.Annotations = make(map[string]string) -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a dashboard object.`, - Long: `Create a dashboard object.`, - - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(0) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -66,6 +83,7 @@ var createCmd = &cobra.Command{ return err } } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.Dashboards.Create(ctx, createReq) @@ -73,32 +91,53 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sql.DeleteDashboardRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sql.DeleteDashboardRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteDashboardRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete DASHBOARD_ID", - Short: `Remove a dashboard.`, - Long: `Remove a dashboard. + cmd.Use = "delete DASHBOARD_ID" + cmd.Short = `Remove a dashboard.` + cmd.Long = `Remove a dashboard. Moves a dashboard to the trash. 
Trashed dashboards do not appear in list views - or searches, and cannot be shared.`, + or searches, and cannot be shared.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -126,32 +165,53 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sql.GetDashboardRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sql.GetDashboardRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sql.GetDashboardRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get DASHBOARD_ID", - Short: `Retrieve a definition.`, - Long: `Retrieve a definition. + cmd.Use = "get DASHBOARD_ID" + cmd.Short = `Retrieve a definition.` + cmd.Long = `Retrieve a definition. Returns a JSON representation of a dashboard object, including its - visualization and query objects.`, + visualization and query objects.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -179,45 +239,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq sql.ListDashboardsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *sql.ListDashboardsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sql.ListDashboardsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().Var(&listReq.Order, "order", `Name of dashboard attribute to order by.`) - listCmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`) - listCmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of dashboards to return per page.`) - listCmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`) + cmd.Flags().Var(&listReq.Order, "order", `Name of dashboard attribute to order by.`) + cmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`) + cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of dashboards to return per page.`) + cmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get dashboard objects.`, - Long: `Get dashboard objects. + cmd.Use = "list" + cmd.Short = `Get dashboard objects.` + cmd.Long = `Get dashboard objects. - Fetch a paginated list of dashboard objects.`, + Fetch a paginated list of dashboard objects.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -234,31 +316,52 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start restore command -var restoreReq sql.RestoreDashboardRequest -func init() { - Cmd.AddCommand(restoreCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restoreOverrides []func( + *cobra.Command, + *sql.RestoreDashboardRequest, +) + +func newRestore() *cobra.Command { + cmd := &cobra.Command{} + + var restoreReq sql.RestoreDashboardRequest + // TODO: short flags -} - -var restoreCmd = &cobra.Command{ - Use: "restore DASHBOARD_ID", - Short: `Restore a dashboard.`, - Long: `Restore a dashboard. + cmd.Use = "restore DASHBOARD_ID" + cmd.Short = `Restore a dashboard.` + cmd.Long = `Restore a dashboard. 
- A restored dashboard appears in list views and searches and can be shared.`, + A restored dashboard appears in list views and searches and can be shared.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -286,10 +389,24 @@ var restoreCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range restoreOverrides { + fn(cmd, &restoreReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestore()) + }) } // end service Dashboards diff --git a/cmd/workspace/dashboards/overrides.go b/cmd/workspace/dashboards/overrides.go index ba7e42ec7..709e657f8 100644 --- a/cmd/workspace/dashboards/overrides.go +++ b/cmd/workspace/dashboards/overrides.go @@ -1,10 +1,18 @@ package dashboards -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, _ *sql.ListDashboardsRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{range .}}{{.Id|green}} {{.Name}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/data-sources/data-sources.go b/cmd/workspace/data-sources/data-sources.go index 513ce2144..969399f42 100755 --- a/cmd/workspace/data-sources/data-sources.go +++ b/cmd/workspace/data-sources/data-sources.go @@ -8,10 +8,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "data-sources", - Short: `This API is provided to assist you in making new query objects.`, - Long: `This API is provided to assist you in making new query objects. When creating +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "data-sources", + Short: `This API is provided to assist you in making new query objects.`, + Long: `This API is provided to assist you in making new query objects. When creating a query object, you may optionally specify a data_source_id for the SQL warehouse against which it will run. If you don't already know the data_source_id for your desired SQL warehouse, this API will help you find @@ -21,30 +26,43 @@ var Cmd = &cobra.Command{ in your workspace. We advise you to use any text editor, REST client, or grep to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get a list of SQL warehouses.`, - Long: `Get a list of SQL warehouses. + cmd.Use = "list" + cmd.Short = `Get a list of SQL warehouses.` + cmd.Long = `Get a list of SQL warehouses. Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, - you need only a SQL warehouse's id to create new queries against it.`, + you need only a SQL warehouse's id to create new queries against it.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.DataSources.List(ctx) @@ -52,10 +70,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service DataSources diff --git a/cmd/workspace/experiments/experiments.go b/cmd/workspace/experiments/experiments.go index 738c0240d..13087029d 100755 --- a/cmd/workspace/experiments/experiments.go +++ b/cmd/workspace/experiments/experiments.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "experiments", - Short: `Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.`, - Long: `Experiments are the primary unit of organization in MLflow; all MLflow runs +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "experiments", + Short: `Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.`, + Long: `Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking @@ -24,47 +29,64 @@ var Cmd = &cobra.Command{ Experiments are located in the workspace file tree. You manage experiments using the same tools you use to manage other workspace objects such as folders, notebooks, and libraries.`, - Annotations: map[string]string{ - "package": "ml", - }, + GroupID: "ml", + Annotations: map[string]string{ + "package": "ml", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create-experiment command -var createExperimentReq ml.CreateExperiment -var createExperimentJson flags.JsonFlag -func init() { - Cmd.AddCommand(createExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createExperimentOverrides []func( + *cobra.Command, + *ml.CreateExperiment, +) + +func newCreateExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var createExperimentReq ml.CreateExperiment + var createExperimentJson flags.JsonFlag + // TODO: short flags - createExperimentCmd.Flags().Var(&createExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createExperimentCmd.Flags().StringVar(&createExperimentReq.ArtifactLocation, "artifact-location", createExperimentReq.ArtifactLocation, `Location where all artifacts for the experiment are stored.`) + cmd.Flags().StringVar(&createExperimentReq.ArtifactLocation, "artifact-location", createExperimentReq.ArtifactLocation, `Location where all artifacts for the experiment are stored.`) // TODO: array: tags -} - -var createExperimentCmd = &cobra.Command{ - Use: "create-experiment NAME", - Short: `Create experiment.`, - Long: `Create experiment. + cmd.Use = "create-experiment NAME" + cmd.Short = `Create experiment.` + cmd.Long = `Create experiment. Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that another experiment with the same name does not already exist and fails if another experiment with the same name already exists. - Throws RESOURCE_ALREADY_EXISTS if a experiment with the given name exists.`, + Throws RESOURCE_ALREADY_EXISTS if a experiment with the given name exists.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -82,48 +104,70 @@ var createExperimentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createExperimentOverrides { + fn(cmd, &createExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateExperiment()) + }) } // start create-run command -var createRunReq ml.CreateRun -var createRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(createRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createRunOverrides []func( + *cobra.Command, + *ml.CreateRun, +) + +func newCreateRun() *cobra.Command { + cmd := &cobra.Command{} + + var createRunReq ml.CreateRun + var createRunJson flags.JsonFlag + // TODO: short flags - createRunCmd.Flags().Var(&createRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createRunCmd.Flags().StringVar(&createRunReq.ExperimentId, "experiment-id", createRunReq.ExperimentId, `ID of the associated experiment.`) - createRunCmd.Flags().Int64Var(&createRunReq.StartTime, "start-time", createRunReq.StartTime, `Unix timestamp in milliseconds of when the run started.`) + cmd.Flags().StringVar(&createRunReq.ExperimentId, "experiment-id", createRunReq.ExperimentId, `ID of the associated experiment.`) + cmd.Flags().Int64Var(&createRunReq.StartTime, "start-time", createRunReq.StartTime, `Unix timestamp in milliseconds of when the run started.`) // TODO: array: tags - createRunCmd.Flags().StringVar(&createRunReq.UserId, "user-id", createRunReq.UserId, `ID of the user executing the run.`) + cmd.Flags().StringVar(&createRunReq.UserId, "user-id", createRunReq.UserId, `ID of the user executing the run.`) -} - -var createRunCmd = &cobra.Command{ - Use: "create-run", - Short: `Create a run.`, - Long: `Create a run. + cmd.Use = "create-run" + cmd.Short = `Create a run.` + cmd.Long = `Create a run. Creates a new run within an experiment. A run is usually a single execution of a machine learning or data ETL pipeline. MLflow uses runs to track the mlflowParam, mlflowMetric and mlflowRunTag associated with a single - execution.`, + execution.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -140,42 +184,64 @@ var createRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createRunOverrides { + fn(cmd, &createRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateRun()) + }) } // start delete-experiment command -var deleteExperimentReq ml.DeleteExperiment -var deleteExperimentJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteExperimentOverrides []func( + *cobra.Command, + *ml.DeleteExperiment, +) + +func newDeleteExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var deleteExperimentReq ml.DeleteExperiment + var deleteExperimentJson flags.JsonFlag + // TODO: short flags - deleteExperimentCmd.Flags().Var(&deleteExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteExperimentCmd = &cobra.Command{ - Use: "delete-experiment EXPERIMENT_ID", - Short: `Delete an experiment.`, - Long: `Delete an experiment. + cmd.Use = "delete-experiment EXPERIMENT_ID" + cmd.Short = `Delete an experiment.` + cmd.Long = `Delete an experiment. Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the experiment uses FileStore, artifacts associated with - experiment are also deleted.`, + experiment are also deleted.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -193,40 +259,62 @@ var deleteExperimentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteExperimentOverrides { + fn(cmd, &deleteExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteExperiment()) + }) } // start delete-run command -var deleteRunReq ml.DeleteRun -var deleteRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteRunOverrides []func( + *cobra.Command, + *ml.DeleteRun, +) + +func newDeleteRun() *cobra.Command { + cmd := &cobra.Command{} + + var deleteRunReq ml.DeleteRun + var deleteRunJson flags.JsonFlag + // TODO: short flags - deleteRunCmd.Flags().Var(&deleteRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteRunCmd = &cobra.Command{ - Use: "delete-run RUN_ID", - Short: `Delete a run.`, - Long: `Delete a run. + cmd.Use = "delete-run RUN_ID" + cmd.Short = `Delete a run.` + cmd.Long = `Delete a run. 
- Marks a run for deletion.`, + Marks a run for deletion.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -244,41 +332,143 @@ var deleteRunCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, -} + cmd.ValidArgsFunction = cobra.NoFileCompletions -// start delete-tag command -var deleteTagReq ml.DeleteTag -var deleteTagJson flags.JsonFlag + // Apply optional overrides to this command. + for _, fn := range deleteRunOverrides { + fn(cmd, &deleteRunReq) + } + + return cmd +} func init() { - Cmd.AddCommand(deleteTagCmd) - // TODO: short flags - deleteTagCmd.Flags().Var(&deleteTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) - + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteRun()) + }) } -var deleteTagCmd = &cobra.Command{ - Use: "delete-tag RUN_ID KEY", - Short: `Delete a tag.`, - Long: `Delete a tag. - - Deletes a tag on a run. Tags are run metadata that can be updated during a run - and after a run completes.`, +// start delete-runs command - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteRunsOverrides []func( + *cobra.Command, + *ml.DeleteRuns, +) + +func newDeleteRuns() *cobra.Command { + cmd := &cobra.Command{} + + var deleteRunsReq ml.DeleteRuns + var deleteRunsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&deleteRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().IntVar(&deleteRunsReq.MaxRuns, "max-runs", deleteRunsReq.MaxRuns, `An optional positive integer indicating the maximum number of runs to delete.`) + + cmd.Use = "delete-runs EXPERIMENT_ID MAX_TIMESTAMP_MILLIS" + cmd.Short = `Delete runs by creation time.` + cmd.Long = `Delete runs by creation time. + + Bulk delete runs in an experiment that were created prior to or at the + specified timestamp. 
Deletes at most max_runs per request.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = deleteRunsJson.Unmarshal(&deleteRunsReq) + if err != nil { + return err + } + } else { + deleteRunsReq.ExperimentId = args[0] + _, err = fmt.Sscan(args[1], &deleteRunsReq.MaxTimestampMillis) + if err != nil { + return fmt.Errorf("invalid MAX_TIMESTAMP_MILLIS: %s", args[1]) + } + } + + response, err := w.Experiments.DeleteRuns(ctx, deleteRunsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteRunsOverrides { + fn(cmd, &deleteRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteRuns()) + }) +} + +// start delete-tag command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteTagOverrides []func( + *cobra.Command, + *ml.DeleteTag, +) + +func newDeleteTag() *cobra.Command { + cmd := &cobra.Command{} + + var deleteTagReq ml.DeleteTag + var deleteTagJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&deleteTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "delete-tag RUN_ID KEY" + cmd.Short = `Delete a tag.` + cmd.Long = `Delete a tag. + + Deletes a tag on a run. Tags are run metadata that can be updated during a run + and after a run completes.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -297,25 +487,45 @@ var deleteTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteTagOverrides { + fn(cmd, &deleteTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteTag()) + }) } // start get-by-name command -var getByNameReq ml.GetByNameRequest -func init() { - Cmd.AddCommand(getByNameCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getByNameOverrides []func( + *cobra.Command, + *ml.GetByNameRequest, +) + +func newGetByName() *cobra.Command { + cmd := &cobra.Command{} + + var getByNameReq ml.GetByNameRequest + // TODO: short flags -} - -var getByNameCmd = &cobra.Command{ - Use: "get-by-name EXPERIMENT_NAME", - Short: `Get metadata.`, - Long: `Get metadata. + cmd.Use = "get-by-name EXPERIMENT_NAME" + cmd.Short = `Get metadata.` + cmd.Long = `Get metadata. Gets metadata for an experiment. @@ -325,15 +535,17 @@ var getByNameCmd = &cobra.Command{ them. Throws RESOURCE_DOES_NOT_EXIST if no experiment with the specified name - exists.`, + exists.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -344,35 +556,57 @@ var getByNameCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getByNameOverrides { + fn(cmd, &getByNameReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetByName()) + }) } // start get-experiment command -var getExperimentReq ml.GetExperimentRequest -func init() { - Cmd.AddCommand(getExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getExperimentOverrides []func( + *cobra.Command, + *ml.GetExperimentRequest, +) + +func newGetExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var getExperimentReq ml.GetExperimentRequest + // TODO: short flags -} - -var getExperimentCmd = &cobra.Command{ - Use: "get-experiment EXPERIMENT_ID", - Short: `Get an experiment.`, - Long: `Get an experiment. + cmd.Use = "get-experiment EXPERIMENT_ID" + cmd.Short = `Get an experiment.` + cmd.Long = `Get an experiment. - Gets metadata for an experiment. This method works on deleted experiments.`, + Gets metadata for an experiment. This method works on deleted experiments.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -383,86 +617,253 @@ var getExperimentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getExperimentOverrides { + fn(cmd, &getExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetExperiment()) + }) } // start get-history command -var getHistoryReq ml.GetHistoryRequest -func init() { - Cmd.AddCommand(getHistoryCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getHistoryOverrides []func( + *cobra.Command, + *ml.GetHistoryRequest, +) + +func newGetHistory() *cobra.Command { + cmd := &cobra.Command{} + + var getHistoryReq ml.GetHistoryRequest + // TODO: short flags - getHistoryCmd.Flags().IntVar(&getHistoryReq.MaxResults, "max-results", getHistoryReq.MaxResults, `Maximum number of Metric records to return per paginated request.`) - getHistoryCmd.Flags().StringVar(&getHistoryReq.PageToken, "page-token", getHistoryReq.PageToken, `Token indicating the page of metric histories to fetch.`) - getHistoryCmd.Flags().StringVar(&getHistoryReq.RunId, "run-id", getHistoryReq.RunId, `ID of the run from which to fetch metric values.`) - getHistoryCmd.Flags().StringVar(&getHistoryReq.RunUuid, "run-uuid", getHistoryReq.RunUuid, `[Deprecated, use run_id instead] ID of the run from which to fetch metric values.`) + cmd.Flags().IntVar(&getHistoryReq.MaxResults, "max-results", getHistoryReq.MaxResults, `Maximum number of Metric records to return per paginated request.`) + cmd.Flags().StringVar(&getHistoryReq.PageToken, "page-token", getHistoryReq.PageToken, `Token indicating the page of metric histories to fetch.`) + cmd.Flags().StringVar(&getHistoryReq.RunId, "run-id", getHistoryReq.RunId, `ID of the run from which to fetch metric values.`) + cmd.Flags().StringVar(&getHistoryReq.RunUuid, "run-uuid", getHistoryReq.RunUuid, `[Deprecated, use run_id instead] ID of the run from which to fetch metric values.`) -} - -var getHistoryCmd = &cobra.Command{ - Use: "get-history METRIC_KEY", - Short: `Get history of a given metric within a run.`, - Long: `Get history of a given metric within a run. + cmd.Use = "get-history METRIC_KEY" + cmd.Short = `Get history of a given metric within a run.` + cmd.Long = `Get history of a given metric within a run. - Gets a list of all values for the specified metric for a given run.`, + Gets a list of all values for the specified metric for a given run.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) getHistoryReq.MetricKey = args[0] - response, err := w.Experiments.GetHistory(ctx, getHistoryReq) + response, err := w.Experiments.GetHistoryAll(ctx, getHistoryReq) if err != nil { return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getHistoryOverrides { + fn(cmd, &getHistoryReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetHistory()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *ml.GetExperimentPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq ml.GetExperimentPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels EXPERIMENT_ID" + cmd.Short = `Get experiment permission levels.` + cmd.Long = `Get experiment permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionLevelsReq.ExperimentId = args[0] + + response, err := w.Experiments.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *ml.GetExperimentPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq ml.GetExperimentPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions EXPERIMENT_ID" + cmd.Short = `Get experiment permissions.` + cmd.Long = `Get experiment permissions. + + Gets the permissions of an experiment. Experiments can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionsReq.ExperimentId = args[0] + + response, err := w.Experiments.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start get-run command -var getRunReq ml.GetRunRequest -func init() { - Cmd.AddCommand(getRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getRunOverrides []func( + *cobra.Command, + *ml.GetRunRequest, +) + +func newGetRun() *cobra.Command { + cmd := &cobra.Command{} + + var getRunReq ml.GetRunRequest + // TODO: short flags - getRunCmd.Flags().StringVar(&getRunReq.RunUuid, "run-uuid", getRunReq.RunUuid, `[Deprecated, use run_id instead] ID of the run to fetch.`) + cmd.Flags().StringVar(&getRunReq.RunUuid, "run-uuid", getRunReq.RunUuid, `[Deprecated, use run_id instead] ID of the run to fetch.`) -} - -var getRunCmd = &cobra.Command{ - Use: "get-run RUN_ID", - Short: `Get a run.`, - Long: `Get a run. + cmd.Use = "get-run RUN_ID" + cmd.Short = `Get a run.` + cmd.Long = `Get a run. Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the same key are logged for a run, return only the value with the latest timestamp. If there are multiple values with the latest timestamp, return the maximum of - these values.`, + these values.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -473,46 +874,68 @@ var getRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getRunOverrides { + fn(cmd, &getRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetRun()) + }) } // start list-artifacts command -var listArtifactsReq ml.ListArtifactsRequest -var listArtifactsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listArtifactsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listArtifactsOverrides []func( + *cobra.Command, + *ml.ListArtifactsRequest, +) + +func newListArtifacts() *cobra.Command { + cmd := &cobra.Command{} + + var listArtifactsReq ml.ListArtifactsRequest + var listArtifactsJson flags.JsonFlag + // TODO: short flags - listArtifactsCmd.Flags().Var(&listArtifactsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listArtifactsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listArtifactsCmd.Flags().StringVar(&listArtifactsReq.PageToken, "page-token", listArtifactsReq.PageToken, `Token indicating the page of artifact results to fetch.`) - listArtifactsCmd.Flags().StringVar(&listArtifactsReq.Path, "path", listArtifactsReq.Path, `Filter artifacts matching this path (a relative path from the root artifact directory).`) - listArtifactsCmd.Flags().StringVar(&listArtifactsReq.RunId, "run-id", listArtifactsReq.RunId, `ID of the run whose artifacts to list.`) - listArtifactsCmd.Flags().StringVar(&listArtifactsReq.RunUuid, "run-uuid", listArtifactsReq.RunUuid, `[Deprecated, use run_id instead] ID of the run whose artifacts to list.`) + cmd.Flags().StringVar(&listArtifactsReq.PageToken, "page-token", listArtifactsReq.PageToken, `Token indicating the page of artifact results to fetch.`) + cmd.Flags().StringVar(&listArtifactsReq.Path, "path", listArtifactsReq.Path, `Filter artifacts matching this path (a relative path from the root artifact directory).`) + cmd.Flags().StringVar(&listArtifactsReq.RunId, "run-id", listArtifactsReq.RunId, `ID of the run whose artifacts to list.`) + cmd.Flags().StringVar(&listArtifactsReq.RunUuid, "run-uuid", listArtifactsReq.RunUuid, `[Deprecated, use run_id instead] ID of the run whose artifacts to list.`) -} - -var listArtifactsCmd = &cobra.Command{ - Use: "list-artifacts", - Short: `Get all artifacts.`, - Long: `Get all artifacts. + cmd.Use = "list-artifacts" + cmd.Short = `Get all artifacts.` + cmd.Long = `Get all artifacts. List artifacts for a run. Takes an optional artifact_path prefix. If it is - specified, the response contains only artifacts with the specified prefix.",`, + specified, the response contains only artifacts with the specified prefix.",` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -529,44 +952,66 @@ var listArtifactsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listArtifactsOverrides { + fn(cmd, &listArtifactsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListArtifacts()) + }) } // start list-experiments command -var listExperimentsReq ml.ListExperimentsRequest -var listExperimentsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listExperimentsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listExperimentsOverrides []func( + *cobra.Command, + *ml.ListExperimentsRequest, +) + +func newListExperiments() *cobra.Command { + cmd := &cobra.Command{} + + var listExperimentsReq ml.ListExperimentsRequest + var listExperimentsJson flags.JsonFlag + // TODO: short flags - listExperimentsCmd.Flags().Var(&listExperimentsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listExperimentsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listExperimentsCmd.Flags().IntVar(&listExperimentsReq.MaxResults, "max-results", listExperimentsReq.MaxResults, `Maximum number of experiments desired.`) - listExperimentsCmd.Flags().StringVar(&listExperimentsReq.PageToken, "page-token", listExperimentsReq.PageToken, `Token indicating the page of experiments to fetch.`) - listExperimentsCmd.Flags().StringVar(&listExperimentsReq.ViewType, "view-type", listExperimentsReq.ViewType, `Qualifier for type of experiments to be returned.`) + cmd.Flags().IntVar(&listExperimentsReq.MaxResults, "max-results", listExperimentsReq.MaxResults, `Maximum number of experiments desired.`) + cmd.Flags().StringVar(&listExperimentsReq.PageToken, "page-token", listExperimentsReq.PageToken, `Token indicating the page of experiments to fetch.`) + cmd.Flags().StringVar(&listExperimentsReq.ViewType, "view-type", listExperimentsReq.ViewType, `Qualifier for type of experiments to be returned.`) -} - -var listExperimentsCmd = &cobra.Command{ - Use: "list-experiments", - Short: `List experiments.`, - Long: `List experiments. + cmd.Use = "list-experiments" + cmd.Short = `List experiments.` + cmd.Long = `List experiments. - Gets a list of all experiments.`, + Gets a list of all experiments.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -583,32 +1028,52 @@ var listExperimentsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listExperimentsOverrides { + fn(cmd, &listExperimentsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListExperiments()) + }) } // start log-batch command -var logBatchReq ml.LogBatch -var logBatchJson flags.JsonFlag -func init() { - Cmd.AddCommand(logBatchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var logBatchOverrides []func( + *cobra.Command, + *ml.LogBatch, +) + +func newLogBatch() *cobra.Command { + cmd := &cobra.Command{} + + var logBatchReq ml.LogBatch + var logBatchJson flags.JsonFlag + // TODO: short flags - logBatchCmd.Flags().Var(&logBatchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logBatchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: metrics // TODO: array: params - logBatchCmd.Flags().StringVar(&logBatchReq.RunId, "run-id", logBatchReq.RunId, `ID of the run to log under.`) + cmd.Flags().StringVar(&logBatchReq.RunId, "run-id", logBatchReq.RunId, `ID of the run to log under.`) // TODO: array: tags -} - -var logBatchCmd = &cobra.Command{ - Use: "log-batch", - Short: `Log a batch.`, - Long: `Log a batch. + cmd.Use = "log-batch" + cmd.Short = `Log a batch.` + cmd.Long = `Log a batch. Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server will respond with an error (non-200 status code). @@ -645,19 +1110,21 @@ var logBatchCmd = &cobra.Command{ The following limits also apply to metric, param, and tag keys and values: - * Metric keyes, param keys, and tag keys can be up to 250 characters in length - * Parameter and tag values can be up to 250 characters in length`, + * Metric keys, param keys, and tag keys can be up to 250 characters in length + * Parameter and tag values can be up to 250 characters in length` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -674,44 +1141,66 @@ var logBatchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logBatchOverrides { + fn(cmd, &logBatchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogBatch()) + }) } // start log-inputs command -var logInputsReq ml.LogInputs -var logInputsJson flags.JsonFlag -func init() { - Cmd.AddCommand(logInputsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var logInputsOverrides []func( + *cobra.Command, + *ml.LogInputs, +) + +func newLogInputs() *cobra.Command { + cmd := &cobra.Command{} + + var logInputsReq ml.LogInputs + var logInputsJson flags.JsonFlag + // TODO: short flags - logInputsCmd.Flags().Var(&logInputsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logInputsJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: datasets - logInputsCmd.Flags().StringVar(&logInputsReq.RunId, "run-id", logInputsReq.RunId, `ID of the run to log under.`) + cmd.Flags().StringVar(&logInputsReq.RunId, "run-id", logInputsReq.RunId, `ID of the run to log under.`) -} - -var logInputsCmd = &cobra.Command{ - Use: "log-inputs", - Short: `Log inputs to a run.`, - Long: `Log inputs to a run. + cmd.Use = "log-inputs" + cmd.Short = `Log inputs to a run.` + cmd.Long = `Log inputs to a run. **NOTE:** Experimental: This API may change or be removed in a future release - without warning.`, + without warning.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -728,46 +1217,68 @@ var logInputsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logInputsOverrides { + fn(cmd, &logInputsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogInputs()) + }) } // start log-metric command -var logMetricReq ml.LogMetric -var logMetricJson flags.JsonFlag -func init() { - Cmd.AddCommand(logMetricCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var logMetricOverrides []func( + *cobra.Command, + *ml.LogMetric, +) + +func newLogMetric() *cobra.Command { + cmd := &cobra.Command{} + + var logMetricReq ml.LogMetric + var logMetricJson flags.JsonFlag + // TODO: short flags - logMetricCmd.Flags().Var(&logMetricJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logMetricJson, "json", `either inline JSON string or @path/to/file.json with request body`) - logMetricCmd.Flags().StringVar(&logMetricReq.RunId, "run-id", logMetricReq.RunId, `ID of the run under which to log the metric.`) - logMetricCmd.Flags().StringVar(&logMetricReq.RunUuid, "run-uuid", logMetricReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the metric.`) - logMetricCmd.Flags().Int64Var(&logMetricReq.Step, "step", logMetricReq.Step, `Step at which to log the metric.`) + cmd.Flags().StringVar(&logMetricReq.RunId, "run-id", logMetricReq.RunId, `ID of the run under which to log the metric.`) + cmd.Flags().StringVar(&logMetricReq.RunUuid, "run-uuid", logMetricReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the metric.`) + cmd.Flags().Int64Var(&logMetricReq.Step, "step", logMetricReq.Step, `Step at which to log the metric.`) -} - -var logMetricCmd = &cobra.Command{ - Use: "log-metric KEY VALUE TIMESTAMP", - Short: `Log a metric.`, - Long: `Log a metric. + cmd.Use = "log-metric KEY VALUE TIMESTAMP" + cmd.Short = `Log a metric.` + cmd.Long = `Log a metric. Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated timestamp. Examples include the various metrics that - represent ML model accuracy. A metric can be logged multiple times.`, + represent ML model accuracy. A metric can be logged multiple times.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -793,44 +1304,66 @@ var logMetricCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logMetricOverrides { + fn(cmd, &logMetricReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogMetric()) + }) } // start log-model command -var logModelReq ml.LogModel -var logModelJson flags.JsonFlag -func init() { - Cmd.AddCommand(logModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var logModelOverrides []func( + *cobra.Command, + *ml.LogModel, +) + +func newLogModel() *cobra.Command { + cmd := &cobra.Command{} + + var logModelReq ml.LogModel + var logModelJson flags.JsonFlag + // TODO: short flags - logModelCmd.Flags().Var(&logModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) - logModelCmd.Flags().StringVar(&logModelReq.ModelJson, "model-json", logModelReq.ModelJson, `MLmodel file in json format.`) - logModelCmd.Flags().StringVar(&logModelReq.RunId, "run-id", logModelReq.RunId, `ID of the run to log under.`) + cmd.Flags().StringVar(&logModelReq.ModelJson, "model-json", logModelReq.ModelJson, `MLmodel file in json format.`) + cmd.Flags().StringVar(&logModelReq.RunId, "run-id", logModelReq.RunId, `ID of the run to log under.`) -} - -var logModelCmd = &cobra.Command{ - Use: "log-model", - Short: `Log a model.`, - Long: `Log a model. + cmd.Use = "log-model" + cmd.Short = `Log a model.` + cmd.Long = `Log a model. **NOTE:** Experimental: This API may change or be removed in a future release - without warning.`, + without warning.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -847,46 +1380,68 @@ var logModelCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logModelOverrides { + fn(cmd, &logModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogModel()) + }) } // start log-param command -var logParamReq ml.LogParam -var logParamJson flags.JsonFlag -func init() { - Cmd.AddCommand(logParamCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var logParamOverrides []func( + *cobra.Command, + *ml.LogParam, +) + +func newLogParam() *cobra.Command { + cmd := &cobra.Command{} + + var logParamReq ml.LogParam + var logParamJson flags.JsonFlag + // TODO: short flags - logParamCmd.Flags().Var(&logParamJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logParamJson, "json", `either inline JSON string or @path/to/file.json with request body`) - logParamCmd.Flags().StringVar(&logParamReq.RunId, "run-id", logParamReq.RunId, `ID of the run under which to log the param.`) - logParamCmd.Flags().StringVar(&logParamReq.RunUuid, "run-uuid", logParamReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the param.`) + cmd.Flags().StringVar(&logParamReq.RunId, "run-id", logParamReq.RunId, `ID of the run under which to log the param.`) + cmd.Flags().StringVar(&logParamReq.RunUuid, "run-uuid", logParamReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the param.`) -} - -var logParamCmd = &cobra.Command{ - Use: "log-param KEY VALUE", - Short: `Log a param.`, - Long: `Log a param. + cmd.Use = "log-param KEY VALUE" + cmd.Short = `Log a param.` + cmd.Long = `Log a param. Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A param can be logged only - once for a run.`, + once for a run.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -905,45 +1460,67 @@ var logParamCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logParamOverrides { + fn(cmd, &logParamReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogParam()) + }) } // start restore-experiment command -var restoreExperimentReq ml.RestoreExperiment -var restoreExperimentJson flags.JsonFlag -func init() { - Cmd.AddCommand(restoreExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var restoreExperimentOverrides []func( + *cobra.Command, + *ml.RestoreExperiment, +) + +func newRestoreExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var restoreExperimentReq ml.RestoreExperiment + var restoreExperimentJson flags.JsonFlag + // TODO: short flags - restoreExperimentCmd.Flags().Var(&restoreExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&restoreExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var restoreExperimentCmd = &cobra.Command{ - Use: "restore-experiment EXPERIMENT_ID", - Short: `Restores an experiment.`, - Long: `Restores an experiment. + cmd.Use = "restore-experiment EXPERIMENT_ID" + cmd.Short = `Restores an experiment.` + cmd.Long = `Restores an experiment. Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are also restored. Throws RESOURCE_DOES_NOT_EXIST if experiment was never created or was - permanently deleted.`, + permanently deleted.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -961,40 +1538,62 @@ var restoreExperimentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range restoreExperimentOverrides { + fn(cmd, &restoreExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestoreExperiment()) + }) } // start restore-run command -var restoreRunReq ml.RestoreRun -var restoreRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(restoreRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restoreRunOverrides []func( + *cobra.Command, + *ml.RestoreRun, +) + +func newRestoreRun() *cobra.Command { + cmd := &cobra.Command{} + + var restoreRunReq ml.RestoreRun + var restoreRunJson flags.JsonFlag + // TODO: short flags - restoreRunCmd.Flags().Var(&restoreRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&restoreRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var restoreRunCmd = &cobra.Command{ - Use: "restore-run RUN_ID", - Short: `Restore a run.`, - Long: `Restore a run. + cmd.Use = "restore-run RUN_ID" + cmd.Short = `Restore a run.` + cmd.Long = `Restore a run. 
- Restores a deleted run.`, + Restores a deleted run.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1012,46 +1611,148 @@ var restoreRunCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range restoreRunOverrides { + fn(cmd, &restoreRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestoreRun()) + }) +} + +// start restore-runs command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restoreRunsOverrides []func( + *cobra.Command, + *ml.RestoreRuns, +) + +func newRestoreRuns() *cobra.Command { + cmd := &cobra.Command{} + + var restoreRunsReq ml.RestoreRuns + var restoreRunsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&restoreRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().IntVar(&restoreRunsReq.MaxRuns, "max-runs", restoreRunsReq.MaxRuns, `An optional positive integer indicating the maximum number of runs to restore.`) + + cmd.Use = "restore-runs EXPERIMENT_ID MIN_TIMESTAMP_MILLIS" + cmd.Short = `Restore runs by deletion time.` + cmd.Long = `Restore runs by deletion time. + + Bulk restore runs in an experiment that were deleted no earlier than the + specified timestamp. Restores at most max_runs per request.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = restoreRunsJson.Unmarshal(&restoreRunsReq) + if err != nil { + return err + } + } else { + restoreRunsReq.ExperimentId = args[0] + _, err = fmt.Sscan(args[1], &restoreRunsReq.MinTimestampMillis) + if err != nil { + return fmt.Errorf("invalid MIN_TIMESTAMP_MILLIS: %s", args[1]) + } + } + + response, err := w.Experiments.RestoreRuns(ctx, restoreRunsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range restoreRunsOverrides { + fn(cmd, &restoreRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestoreRuns()) + }) } // start search-experiments command -var searchExperimentsReq ml.SearchExperiments -var searchExperimentsJson flags.JsonFlag -func init() { - Cmd.AddCommand(searchExperimentsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var searchExperimentsOverrides []func( + *cobra.Command, + *ml.SearchExperiments, +) + +func newSearchExperiments() *cobra.Command { + cmd := &cobra.Command{} + + var searchExperimentsReq ml.SearchExperiments + var searchExperimentsJson flags.JsonFlag + // TODO: short flags - searchExperimentsCmd.Flags().Var(&searchExperimentsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&searchExperimentsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - searchExperimentsCmd.Flags().StringVar(&searchExperimentsReq.Filter, "filter", searchExperimentsReq.Filter, `String representing a SQL filter condition (e.g.`) - searchExperimentsCmd.Flags().Int64Var(&searchExperimentsReq.MaxResults, "max-results", searchExperimentsReq.MaxResults, `Maximum number of experiments desired.`) + cmd.Flags().StringVar(&searchExperimentsReq.Filter, "filter", searchExperimentsReq.Filter, `String representing a SQL filter condition (e.g.`) + cmd.Flags().Int64Var(&searchExperimentsReq.MaxResults, "max-results", searchExperimentsReq.MaxResults, `Maximum number of experiments desired.`) // TODO: array: order_by - searchExperimentsCmd.Flags().StringVar(&searchExperimentsReq.PageToken, "page-token", searchExperimentsReq.PageToken, `Token indicating the page of experiments to fetch.`) - searchExperimentsCmd.Flags().Var(&searchExperimentsReq.ViewType, "view-type", `Qualifier for type of experiments to be returned.`) + cmd.Flags().StringVar(&searchExperimentsReq.PageToken, "page-token", searchExperimentsReq.PageToken, `Token indicating the page of experiments to fetch.`) + cmd.Flags().Var(&searchExperimentsReq.ViewType, "view-type", `Qualifier for type of experiments to be returned.`) -} - -var searchExperimentsCmd = &cobra.Command{ - Use: "search-experiments", - Short: `Search experiments.`, - Long: `Search experiments. + cmd.Use = "search-experiments" + cmd.Short = `Search experiments.` + cmd.Long = `Search experiments. - Searches for experiments that satisfy specified search criteria.`, + Searches for experiments that satisfy specified search criteria.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1068,49 +1769,71 @@ var searchExperimentsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range searchExperimentsOverrides { + fn(cmd, &searchExperimentsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSearchExperiments()) + }) } // start search-runs command -var searchRunsReq ml.SearchRuns -var searchRunsJson flags.JsonFlag -func init() { - Cmd.AddCommand(searchRunsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var searchRunsOverrides []func( + *cobra.Command, + *ml.SearchRuns, +) + +func newSearchRuns() *cobra.Command { + cmd := &cobra.Command{} + + var searchRunsReq ml.SearchRuns + var searchRunsJson flags.JsonFlag + // TODO: short flags - searchRunsCmd.Flags().Var(&searchRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&searchRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: experiment_ids - searchRunsCmd.Flags().StringVar(&searchRunsReq.Filter, "filter", searchRunsReq.Filter, `A filter expression over params, metrics, and tags, that allows returning a subset of runs.`) - searchRunsCmd.Flags().IntVar(&searchRunsReq.MaxResults, "max-results", searchRunsReq.MaxResults, `Maximum number of runs desired.`) + cmd.Flags().StringVar(&searchRunsReq.Filter, "filter", searchRunsReq.Filter, `A filter expression over params, metrics, and tags, that allows returning a subset of runs.`) + cmd.Flags().IntVar(&searchRunsReq.MaxResults, "max-results", searchRunsReq.MaxResults, `Maximum number of runs desired.`) // TODO: array: order_by - searchRunsCmd.Flags().StringVar(&searchRunsReq.PageToken, "page-token", searchRunsReq.PageToken, `Token for the current page of runs.`) - searchRunsCmd.Flags().Var(&searchRunsReq.RunViewType, "run-view-type", `Whether to display only active, only deleted, or all runs.`) + cmd.Flags().StringVar(&searchRunsReq.PageToken, "page-token", searchRunsReq.PageToken, `Token for the current page of runs.`) + cmd.Flags().Var(&searchRunsReq.RunViewType, "run-view-type", `Whether to display only active, only deleted, or all runs.`) -} - -var searchRunsCmd = &cobra.Command{ - Use: "search-runs", - Short: `Search for runs.`, - Long: `Search for runs. + cmd.Use = "search-runs" + cmd.Short = `Search for runs.` + cmd.Long = `Search for runs. Searches for runs that satisfy expressions. - Search expressions can use mlflowMetric and mlflowParam keys.",`, + Search expressions can use mlflowMetric and mlflowParam keys.",` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1127,40 +1850,62 @@ var searchRunsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range searchRunsOverrides { + fn(cmd, &searchRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSearchRuns()) + }) } // start set-experiment-tag command -var setExperimentTagReq ml.SetExperimentTag -var setExperimentTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(setExperimentTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setExperimentTagOverrides []func( + *cobra.Command, + *ml.SetExperimentTag, +) + +func newSetExperimentTag() *cobra.Command { + cmd := &cobra.Command{} + + var setExperimentTagReq ml.SetExperimentTag + var setExperimentTagJson flags.JsonFlag + // TODO: short flags - setExperimentTagCmd.Flags().Var(&setExperimentTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setExperimentTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var setExperimentTagCmd = &cobra.Command{ - Use: "set-experiment-tag EXPERIMENT_ID KEY VALUE", - Short: `Set a tag.`, - Long: `Set a tag. + cmd.Use = "set-experiment-tag EXPERIMENT_ID KEY VALUE" + cmd.Short = `Set a tag.` + cmd.Long = `Set a tag. - Sets a tag on an experiment. Experiment tags are metadata that can be updated.`, + Sets a tag on an experiment. Experiment tags are metadata that can be updated.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1180,44 +1925,138 @@ var setExperimentTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setExperimentTagOverrides { + fn(cmd, &setExperimentTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetExperimentTag()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setPermissionsOverrides []func( + *cobra.Command, + *ml.ExperimentPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq ml.ExperimentPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions EXPERIMENT_ID" + cmd.Short = `Set experiment permissions.` + cmd.Long = `Set experiment permissions. + + Sets permissions on an experiment. Experiments can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + setPermissionsReq.ExperimentId = args[0] + + response, err := w.Experiments.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start set-tag command -var setTagReq ml.SetTag -var setTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(setTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setTagOverrides []func( + *cobra.Command, + *ml.SetTag, +) + +func newSetTag() *cobra.Command { + cmd := &cobra.Command{} + + var setTagReq ml.SetTag + var setTagJson flags.JsonFlag + // TODO: short flags - setTagCmd.Flags().Var(&setTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) - setTagCmd.Flags().StringVar(&setTagReq.RunId, "run-id", setTagReq.RunId, `ID of the run under which to log the tag.`) - setTagCmd.Flags().StringVar(&setTagReq.RunUuid, "run-uuid", setTagReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the tag.`) + cmd.Flags().StringVar(&setTagReq.RunId, "run-id", setTagReq.RunId, `ID of the run under which to log the tag.`) + cmd.Flags().StringVar(&setTagReq.RunUuid, "run-uuid", setTagReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the tag.`) -} - -var setTagCmd = &cobra.Command{ - Use: "set-tag KEY VALUE", - Short: `Set a tag.`, - Long: `Set a tag. + cmd.Use = "set-tag KEY VALUE" + cmd.Short = `Set a tag.` + cmd.Long = `Set a tag. Sets a tag on a run. 
Tags are run metadata that can be updated during a run - and after a run completes.`, + and after a run completes.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1236,42 +2075,64 @@ var setTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setTagOverrides { + fn(cmd, &setTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetTag()) + }) } // start update-experiment command -var updateExperimentReq ml.UpdateExperiment -var updateExperimentJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateExperimentOverrides []func( + *cobra.Command, + *ml.UpdateExperiment, +) + +func newUpdateExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var updateExperimentReq ml.UpdateExperiment + var updateExperimentJson flags.JsonFlag + // TODO: short flags - updateExperimentCmd.Flags().Var(&updateExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateExperimentCmd.Flags().StringVar(&updateExperimentReq.NewName, "new-name", updateExperimentReq.NewName, `If provided, the experiment's name is changed to the new name.`) + cmd.Flags().StringVar(&updateExperimentReq.NewName, "new-name", updateExperimentReq.NewName, `If provided, the experiment's name is changed to the new name.`) -} - -var updateExperimentCmd = &cobra.Command{ - Use: "update-experiment EXPERIMENT_ID", - Short: `Update an experiment.`, - Long: `Update an experiment. + cmd.Use = "update-experiment EXPERIMENT_ID" + cmd.Short = `Update an experiment.` + cmd.Long = `Update an experiment. - Updates experiment metadata.`, + Updates experiment metadata.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1289,45 +2150,139 @@ var updateExperimentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateExperimentOverrides { + fn(cmd, &updateExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateExperiment()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *ml.ExperimentPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq ml.ExperimentPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions EXPERIMENT_ID" + cmd.Short = `Update experiment permissions.` + cmd.Long = `Update experiment permissions. + + Updates the permissions on an experiment. Experiments can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + updatePermissionsReq.ExperimentId = args[0] + + response, err := w.Experiments.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // start update-run command -var updateRunReq ml.UpdateRun -var updateRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateRunOverrides []func( + *cobra.Command, + *ml.UpdateRun, +) + +func newUpdateRun() *cobra.Command { + cmd := &cobra.Command{} + + var updateRunReq ml.UpdateRun + var updateRunJson flags.JsonFlag + // TODO: short flags - updateRunCmd.Flags().Var(&updateRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateRunCmd.Flags().Int64Var(&updateRunReq.EndTime, "end-time", updateRunReq.EndTime, `Unix timestamp in milliseconds of when the run ended.`) - updateRunCmd.Flags().StringVar(&updateRunReq.RunId, "run-id", updateRunReq.RunId, `ID of the run to update.`) - updateRunCmd.Flags().StringVar(&updateRunReq.RunUuid, "run-uuid", updateRunReq.RunUuid, `[Deprecated, use run_id instead] ID of the run to update.`) - updateRunCmd.Flags().Var(&updateRunReq.Status, "status", `Updated status of the run.`) + cmd.Flags().Int64Var(&updateRunReq.EndTime, "end-time", updateRunReq.EndTime, `Unix timestamp in milliseconds of when the run ended.`) + cmd.Flags().StringVar(&updateRunReq.RunId, "run-id", updateRunReq.RunId, `ID of the run to update.`) + cmd.Flags().StringVar(&updateRunReq.RunUuid, "run-uuid", updateRunReq.RunUuid, `[Deprecated, use run_id instead] ID of the run to update.`) + cmd.Flags().Var(&updateRunReq.Status, "status", `Updated status of the run.`) -} - -var updateRunCmd = &cobra.Command{ - Use: "update-run", - Short: `Update a run.`, - Long: `Update a run. + cmd.Use = "update-run" + cmd.Short = `Update a run.` + cmd.Long = `Update a run. - Updates run metadata.`, + Updates run metadata.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1344,10 +2299,24 @@ var updateRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateRunOverrides { + fn(cmd, &updateRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateRun()) + }) } // end service Experiments diff --git a/cmd/workspace/external-locations/external-locations.go b/cmd/workspace/external-locations/external-locations.go index a739c931c..7f67b26b7 100755 --- a/cmd/workspace/external-locations/external-locations.go +++ b/cmd/workspace/external-locations/external-locations.go @@ -10,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "external-locations", - Short: `An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.`, - Long: `An external location is an object that combines a cloud storage path with a +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "external-locations", + Short: `An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.`, + Long: `An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path. Each external location is subject to Unity Catalog access-control policies that control which users and groups can access the credential. If a user does not @@ -26,45 +31,64 @@ var Cmd = &cobra.Command{ To create external locations, you must be a metastore admin or a user with the **CREATE_EXTERNAL_LOCATION** privilege.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateExternalLocation -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.CreateExternalLocation, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateExternalLocation + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) - createCmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Indicates whether the external location is read-only.`) - createCmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Skips validation of the storage credential associated with the external location.`) + cmd.Flags().StringVar(&createReq.AccessPoint, "access-point", createReq.AccessPoint, `The AWS access point to use when accesing s3 for this external location.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + // TODO: complex arg: encryption_details + cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Indicates whether the external location is read-only.`) + cmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Skips validation of the storage credential associated with the external location.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME URL CREDENTIAL_NAME", - Short: `Create an external location.`, - Long: `Create an external location. + cmd.Use = "create NAME URL CREDENTIAL_NAME" + cmd.Short = `Create an external location.` + cmd.Long = `Create an external location. Creates a new external location entry in the metastore. The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the - metastore and the associated storage credential.`, + metastore and the associated storage credential.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -84,38 +108,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteExternalLocationRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteExternalLocationRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteExternalLocationRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if there are dependent external tables or mounts.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if there are dependent external tables or mounts.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete an external location.`, - Long: `Delete an external location. + cmd.Use = "delete NAME" + cmd.Short = `Delete an external location.` + cmd.Long = `Delete an external location. Deletes the specified external location from the metastore. The caller must be - the owner of the external location.`, + the owner of the external location.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -126,37 +172,59 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetExternalLocationRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetExternalLocationRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetExternalLocationRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get an external location.`, - Long: `Get an external location. + cmd.Use = "get NAME" + cmd.Short = `Get an external location.` + cmd.Long = `Get an external location. Gets an external location from the metastore. 
The caller must be either a metastore admin, the owner of the external location, or a user that has some - privilege on the external location.`, + privilege on the external location.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -167,32 +235,50 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List external locations.`, - Long: `List external locations. + cmd.Use = "list" + cmd.Short = `List external locations.` + cmd.Long = `List external locations. Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on the external location. There is - no guarantee of a specific ordering of the elements in the array.`, + no guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.ExternalLocations.ListAll(ctx) @@ -200,50 +286,74 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateExternalLocation -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *catalog.UpdateExternalLocation, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateExternalLocation + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`) - updateCmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the external location.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the external location.`) - updateCmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Indicates whether the external location is read-only.`) - updateCmd.Flags().StringVar(&updateReq.Url, "url", updateReq.Url, `Path URL of the external location.`) + cmd.Flags().StringVar(&updateReq.AccessPoint, "access-point", updateReq.AccessPoint, `The AWS access point to use when accesing s3 for this external location.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`) + // TODO: complex arg: encryption_details + cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the external location.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the external location.`) + cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Indicates whether the external location is read-only.`) + cmd.Flags().StringVar(&updateReq.Url, "url", updateReq.Url, `Path URL of the external location.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update an external location.`, - Long: `Update an external location. + cmd.Use = "update NAME" + cmd.Short = `Update an external location.` + cmd.Long = `Update an external location. Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. 
In the second case, the admin - can only update the name of the external location.`, + can only update the name of the external location.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -261,10 +371,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service ExternalLocations diff --git a/cmd/workspace/external-locations/overrides.go b/cmd/workspace/external-locations/overrides.go index 7efd193d9..a271e5f65 100644 --- a/cmd/workspace/external-locations/overrides.go +++ b/cmd/workspace/external-locations/overrides.go @@ -1,10 +1,17 @@ package external_locations -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Name"}} {{header "Credential"}} {{header "URL"}} {{range .}}{{.Name|green}} {{.CredentialName|cyan}} {{.Url}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/functions/functions.go b/cmd/workspace/functions/functions.go index 9ce3f2faf..02c8531db 100755 --- a/cmd/workspace/functions/functions.go +++ b/cmd/workspace/functions/functions.go @@ -12,51 +12,72 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "functions", - Short: `Functions implement User-Defined Functions (UDFs) in Unity Catalog.`, - Long: `Functions implement User-Defined Functions (UDFs) in Unity Catalog. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "functions", + Short: `Functions implement User-Defined Functions (UDFs) in Unity Catalog.`, + Long: `Functions implement User-Defined Functions (UDFs) in Unity Catalog. The function implementation can be any SQL expression or Query, and it can be invoked wherever a table reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it can be referenced with the form __catalog_name__.__schema_name__.__function_name__.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateFunction -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateFunction, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateFunction + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) - createCmd.Flags().StringVar(&createReq.ExternalLanguage, "external-language", createReq.ExternalLanguage, `External function language.`) - createCmd.Flags().StringVar(&createReq.ExternalName, "external-name", createReq.ExternalName, `External function name.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.ExternalLanguage, "external-language", createReq.ExternalLanguage, `External function language.`) + cmd.Flags().StringVar(&createReq.ExternalName, "external-name", createReq.ExternalName, `External function name.`) // TODO: map via StringToStringVar: properties - createCmd.Flags().StringVar(&createReq.SqlPath, "sql-path", createReq.SqlPath, `List of schemes whose objects can be referenced without qualification.`) + cmd.Flags().StringVar(&createReq.SqlPath, "sql-path", createReq.SqlPath, `List of schemes whose objects can be referenced without qualification.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a function.`, - Long: `Create a function. + cmd.Use = "create" + cmd.Short = `Create a function.` + cmd.Long = `Create a function. Creates a new function The user must have the following permissions in order for the function to be created: - **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** - and **CREATE_FUNCTION** on the function's parent schema`, + and **CREATE_FUNCTION** on the function's parent schema` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -74,38 +95,59 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteFunctionRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteFunctionRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteFunctionRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the function is notempty.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the function is notempty.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a function.`, - Long: `Delete a function. + cmd.Use = "delete NAME" + cmd.Short = `Delete a function.` + cmd.Long = `Delete a function. Deletes the function that matches the supplied name. For the deletion to succeed, the user must satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on - its parent catalog and the **USE_SCHEMA** privilege on its parent schema`, + its parent catalog and the **USE_SCHEMA** privilege on its parent schema` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -133,25 +175,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetFunctionRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetFunctionRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetFunctionRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a function.`, - Long: `Get a function. + cmd.Use = "get NAME" + cmd.Short = `Get a function.` + cmd.Long = `Get a function. Gets a function from within a parent catalog and schema. 
For the fetch to succeed, the user must satisfy one of the following requirements: - Is a @@ -159,11 +221,12 @@ var getCmd = &cobra.Command{ **USE_CATALOG** privilege on the function's parent catalog and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the **USE_SCHEMA** privilege on the function's parent schema, and the - **EXECUTE** privilege on the function itself`, + **EXECUTE** privilege on the function itself` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -191,25 +254,45 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListFunctionsRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListFunctionsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListFunctionsRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list CATALOG_NAME SCHEMA_NAME", - Short: `List functions.`, - Long: `List functions. + cmd.Use = "list CATALOG_NAME SCHEMA_NAME" + cmd.Short = `List functions.` + cmd.Long = `List functions. List functions within the specified parent catalog and schema. If the user is a metastore admin, all functions are returned in the output list. Otherwise, @@ -217,15 +300,17 @@ var listCmd = &cobra.Command{ **USE_SCHEMA** privilege on the schema, and the output list contains only functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is no guarantee of a specific ordering of the elements in - the array.`, + the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -237,27 +322,47 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateFunction -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateFunction, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateFunction + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of function.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of function.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a function.`, - Long: `Update a function. + cmd.Use = "update NAME" + cmd.Short = `Update a function.` + cmd.Long = `Update a function. Updates the function that matches the supplied name. Only the owner of the function can be updated. If the user is not a metastore admin, the user must @@ -266,11 +371,12 @@ var updateCmd = &cobra.Command{ function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the **USE_SCHEMA** privilege on the - function's parent schema.`, + function's parent schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -298,10 +404,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Functions diff --git a/cmd/workspace/git-credentials/git-credentials.go b/cmd/workspace/git-credentials/git-credentials.go index f75ed83e0..8d5c59ed8 100755 --- a/cmd/workspace/git-credentials/git-credentials.go +++ b/cmd/workspace/git-credentials/git-credentials.go @@ -12,54 +12,76 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "git-credentials", - Short: `Registers personal access token for Databricks to do operations on behalf of the user.`, - Long: `Registers personal access token for Databricks to do operations on behalf of +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "git-credentials", + Short: `Registers personal access token for Databricks to do operations on behalf of the user.`, + Long: `Registers personal access token for Databricks to do operations on behalf of the user. See [more info]. [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html`, - Annotations: map[string]string{ - "package": "workspace", - }, + GroupID: "workspace", + Annotations: map[string]string{ + "package": "workspace", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq workspace.CreateCredentials -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *workspace.CreateCredentials, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq workspace.CreateCredentials + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.GitUsername, "git-username", createReq.GitUsername, `Git username.`) - createCmd.Flags().StringVar(&createReq.PersonalAccessToken, "personal-access-token", createReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) + cmd.Flags().StringVar(&createReq.GitUsername, "git-username", createReq.GitUsername, `Git username.`) + cmd.Flags().StringVar(&createReq.PersonalAccessToken, "personal-access-token", createReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) -} - -var createCmd = &cobra.Command{ - Use: "create GIT_PROVIDER", - Short: `Create a credential entry.`, - Long: `Create a credential entry. + cmd.Use = "create GIT_PROVIDER" + cmd.Short = `Create a credential entry.` + cmd.Long = `Create a credential entry. Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update existing credentials, or the - DELETE endpoint to delete existing credentials.`, + DELETE endpoint to delete existing credentials.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -77,31 +99,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq workspace.DeleteGitCredentialRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *workspace.DeleteGitCredentialRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq workspace.DeleteGitCredentialRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete CREDENTIAL_ID", - Short: `Delete a credential.`, - Long: `Delete a credential. + cmd.Use = "delete CREDENTIAL_ID" + cmd.Short = `Delete a credential.` + cmd.Long = `Delete a credential. - Deletes the specified Git credential.`, + Deletes the specified Git credential.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -132,31 +175,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq workspace.GetGitCredentialRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *workspace.GetGitCredentialRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq workspace.GetGitCredentialRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get CREDENTIAL_ID", - Short: `Get a credential entry.`, - Long: `Get a credential entry. + cmd.Use = "get CREDENTIAL_ID" + cmd.Short = `Get a credential entry.` + cmd.Long = `Get a credential entry. - Gets the Git credential with the specified credential ID.`, + Gets the Git credential with the specified credential ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -187,30 +251,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get Git credentials.`, - Long: `Get Git credentials. + cmd.Use = "list" + cmd.Short = `Get Git credentials.` + cmd.Long = `Get Git credentials. Lists the calling user's Git credentials. One credential per user is - supported.`, + supported.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.GitCredentials.ListAll(ctx) @@ -218,35 +300,56 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq workspace.UpdateCredentials -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *workspace.UpdateCredentials, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq workspace.UpdateCredentials + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.GitProvider, "git-provider", updateReq.GitProvider, `Git provider.`) - updateCmd.Flags().StringVar(&updateReq.GitUsername, "git-username", updateReq.GitUsername, `Git username.`) - updateCmd.Flags().StringVar(&updateReq.PersonalAccessToken, "personal-access-token", updateReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) + cmd.Flags().StringVar(&updateReq.GitProvider, "git-provider", updateReq.GitProvider, `Git provider.`) + cmd.Flags().StringVar(&updateReq.GitUsername, "git-username", updateReq.GitUsername, `Git username.`) + cmd.Flags().StringVar(&updateReq.PersonalAccessToken, "personal-access-token", updateReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) -} - -var updateCmd = &cobra.Command{ - Use: "update CREDENTIAL_ID", - Short: `Update a credential.`, - Long: `Update a credential. + cmd.Use = "update CREDENTIAL_ID" + cmd.Short = `Update a credential.` + cmd.Long = `Update a credential. 
- Updates the specified Git credential.`, + Updates the specified Git credential.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -277,10 +380,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service GitCredentials diff --git a/cmd/workspace/global-init-scripts/global-init-scripts.go b/cmd/workspace/global-init-scripts/global-init-scripts.go index b63338f6c..12c49a513 100755 --- a/cmd/workspace/global-init-scripts/global-init-scripts.go +++ b/cmd/workspace/global-init-scripts/global-init-scripts.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "global-init-scripts", - Short: `The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.`, - Long: `The Global Init Scripts API enables Workspace administrators to configure +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "global-init-scripts", + Short: `The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.`, + Long: `The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace. These scripts run on every node in every cluster in the workspace. @@ -24,42 +29,59 @@ var Cmd = &cobra.Command{ script returns with a bad exit code, the Apache Spark container fails to launch and init scripts with later position are skipped. If enough containers fail, the entire cluster fails with a GLOBAL_INIT_SCRIPT_FAILURE error code.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq compute.GlobalInitScriptCreateRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *compute.GlobalInitScriptCreateRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq compute.GlobalInitScriptCreateRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Enabled, "enabled", createReq.Enabled, `Specifies whether the script is enabled.`) - createCmd.Flags().IntVar(&createReq.Position, "position", createReq.Position, `The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.`) + cmd.Flags().BoolVar(&createReq.Enabled, "enabled", createReq.Enabled, `Specifies whether the script is enabled.`) + cmd.Flags().IntVar(&createReq.Position, "position", createReq.Position, `The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME SCRIPT", - Short: `Create init script.`, - Long: `Create init script. + cmd.Use = "create NAME SCRIPT" + cmd.Short = `Create init script.` + cmd.Long = `Create init script. - Creates a new global init script in this workspace.`, + Creates a new global init script in this workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -78,31 +100,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq compute.DeleteGlobalInitScriptRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *compute.DeleteGlobalInitScriptRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq compute.DeleteGlobalInitScriptRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete SCRIPT_ID", - Short: `Delete init script.`, - Long: `Delete init script. + cmd.Use = "delete SCRIPT_ID" + cmd.Short = `Delete init script.` + cmd.Long = `Delete init script. 
- Deletes a global init script.`, + Deletes a global init script.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -130,31 +173,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq compute.GetGlobalInitScriptRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetGlobalInitScriptRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq compute.GetGlobalInitScriptRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get SCRIPT_ID", - Short: `Get an init script.`, - Long: `Get an init script. + cmd.Use = "get SCRIPT_ID" + cmd.Short = `Get an init script.` + cmd.Long = `Get an init script. - Gets all the details of a script, including its Base64-encoded contents.`, + Gets all the details of a script, including its Base64-encoded contents.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -182,32 +246,50 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get init scripts.`, - Long: `Get init scripts. + cmd.Use = "list" + cmd.Short = `Get init scripts.` + cmd.Long = `Get init scripts. Get a list of all global init scripts for this workspace. This returns all properties for each script but **not** the script contents. 
To retrieve the contents of a script, use the [get a global init - script](#operation/get-script) operation.`, + script](#operation/get-script) operation.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.GlobalInitScripts.ListAll(ctx) @@ -215,39 +297,61 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq compute.GlobalInitScriptUpdateRequest -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *compute.GlobalInitScriptUpdateRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq compute.GlobalInitScriptUpdateRequest + // TODO: short flags - updateCmd.Flags().BoolVar(&updateReq.Enabled, "enabled", updateReq.Enabled, `Specifies whether the script is enabled.`) - updateCmd.Flags().IntVar(&updateReq.Position, "position", updateReq.Position, `The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.`) + cmd.Flags().BoolVar(&updateReq.Enabled, "enabled", updateReq.Enabled, `Specifies whether the script is enabled.`) + cmd.Flags().IntVar(&updateReq.Position, "position", updateReq.Position, `The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME SCRIPT SCRIPT_ID", - Short: `Update init script.`, - Long: `Update init script. + cmd.Use = "update NAME SCRIPT SCRIPT_ID" + cmd.Short = `Update init script.` + cmd.Long = `Update init script. Updates a global init script, specifying only the fields to change. All fields - are optional. Unspecified fields retain their current value.`, + are optional. Unspecified fields retain their current value.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -260,10 +364,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service GlobalInitScripts diff --git a/cmd/workspace/grants/grants.go b/cmd/workspace/grants/grants.go index 1cc973872..a5ebd7330 100755 --- a/cmd/workspace/grants/grants.go +++ b/cmd/workspace/grants/grants.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "grants", - Short: `In Unity Catalog, data is secure by default.`, - Long: `In Unity Catalog, data is secure by default. Initially, users have no access +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "grants", + Short: `In Unity Catalog, data is secure by default.`, + Long: `In Unity Catalog, data is secure by default. Initially, users have no access to data in a metastore. Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are @@ -26,36 +31,53 @@ var Cmd = &cobra.Command{ automatically grants the privilege to all current and future objects within the catalog. Similarly, privileges granted on a schema are inherited by all current and future objects within that schema.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get command -var getReq catalog.GetGrantRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetGrantRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetGrantRequest + // TODO: short flags - getCmd.Flags().StringVar(&getReq.Principal, "principal", getReq.Principal, `If provided, only the permissions for the specified principal (user or group) are returned.`) + cmd.Flags().StringVar(&getReq.Principal, "principal", getReq.Principal, `If provided, only the permissions for the specified principal (user or group) are returned.`) -} - -var getCmd = &cobra.Command{ - Use: "get SECURABLE_TYPE FULL_NAME", - Short: `Get permissions.`, - Long: `Get permissions. + cmd.Use = "get SECURABLE_TYPE FULL_NAME" + cmd.Short = `Get permissions.` + cmd.Long = `Get permissions. 
- Gets the permissions for a securable.`, + Gets the permissions for a securable.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -70,37 +92,59 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start get-effective command -var getEffectiveReq catalog.GetEffectiveRequest -func init() { - Cmd.AddCommand(getEffectiveCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getEffectiveOverrides []func( + *cobra.Command, + *catalog.GetEffectiveRequest, +) + +func newGetEffective() *cobra.Command { + cmd := &cobra.Command{} + + var getEffectiveReq catalog.GetEffectiveRequest + // TODO: short flags - getEffectiveCmd.Flags().StringVar(&getEffectiveReq.Principal, "principal", getEffectiveReq.Principal, `If provided, only the effective permissions for the specified principal (user or group) are returned.`) + cmd.Flags().StringVar(&getEffectiveReq.Principal, "principal", getEffectiveReq.Principal, `If provided, only the effective permissions for the specified principal (user or group) are returned.`) -} - -var getEffectiveCmd = &cobra.Command{ - Use: "get-effective SECURABLE_TYPE FULL_NAME", - Short: `Get effective permissions.`, - Long: `Get effective permissions. + cmd.Use = "get-effective SECURABLE_TYPE FULL_NAME" + cmd.Short = `Get effective permissions.` + cmd.Long = `Get effective permissions. - Gets the effective permissions for a securable.`, + Gets the effective permissions for a securable.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -115,39 +159,61 @@ var getEffectiveCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getEffectiveOverrides { + fn(cmd, &getEffectiveReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetEffective()) + }) } // start update command -var updateReq catalog.UpdatePermissions -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdatePermissions, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdatePermissions + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: changes -} - -var updateCmd = &cobra.Command{ - Use: "update SECURABLE_TYPE FULL_NAME", - Short: `Update permissions.`, - Long: `Update permissions. + cmd.Use = "update SECURABLE_TYPE FULL_NAME" + cmd.Short = `Update permissions.` + cmd.Long = `Update permissions. - Updates the permissions for a securable.`, + Updates the permissions for a securable.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -168,10 +234,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Grants diff --git a/cmd/workspace/groups.go b/cmd/workspace/groups.go index 92b9ae946..43159d18b 100644 --- a/cmd/workspace/groups.go +++ b/cmd/workspace/groups.go @@ -1,7 +1,6 @@ package workspace import ( - "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" ) @@ -55,11 +54,3 @@ func Groups() []cobra.Group { }, } } - -func init() { - // Register groups with parent command - groups := Groups() - for i := range groups { - root.RootCmd.AddGroup(&groups[i]) - } -} diff --git a/cmd/workspace/groups/groups.go b/cmd/workspace/groups/groups.go index bdadd1125..3f46abbc8 100755 --- a/cmd/workspace/groups/groups.go +++ b/cmd/workspace/groups/groups.go @@ -12,59 +12,81 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "groups", - Short: `Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.`, - Long: `Groups simplify identity management, making it easier to assign access to +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "groups", + Short: `Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.`, + Long: `Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects. It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks workspace identities can be assigned as members of groups, and members inherit permissions that are assigned to their group.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.Group -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *iam.Group, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.Group + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) // TODO: array: members // TODO: complex arg: meta // TODO: array: roles -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new group.`, - Long: `Create a new group. + cmd.Use = "create" + cmd.Short = `Create a new group.` + cmd.Long = `Create a new group. 
Creates a group in the Databricks workspace with a unique name, using the - supplied group details.`, + supplied group details.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -81,31 +103,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteGroupRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteGroupRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteGroupRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a group.`, - Long: `Delete a group. + cmd.Use = "delete ID" + cmd.Short = `Delete a group.` + cmd.Long = `Delete a group. - Deletes a group from the Databricks workspace.`, + Deletes a group from the Databricks workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -133,31 +176,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetGroupRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetGroupRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetGroupRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get group details.`, - Long: `Get group details. 
+ cmd.Use = "get ID" + cmd.Short = `Get group details.` + cmd.Long = `Get group details. - Gets the information for a specific group in the Databricks workspace.`, + Gets the information for a specific group in the Databricks workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -185,48 +249,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListGroupsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *iam.ListGroupsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListGroupsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the 
first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List group details.`, - Long: `List group details. + cmd.Use = "list" + cmd.Short = `List group details.` + cmd.Long = `List group details. - Gets all details of the groups associated with the Databricks workspace.`, + Gets all details of the groups associated with the Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -243,35 +329,57 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schemas -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update group details.`, - Long: `Update group details. + cmd.Use = "patch ID" + cmd.Short = `Update group details.` + cmd.Long = `Update group details. - Partially updates the details of a group.`, + Partially updates the details of a group.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -305,42 +413,63 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.Group -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.Group, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.Group + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks group ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks group ID.`) // TODO: array: members // TODO: complex arg: meta // TODO: array: roles -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace a group.`, - Long: `Replace a group. + cmd.Use = "update ID" + cmd.Short = `Replace a group.` + cmd.Long = `Replace a group. - Updates the details of a group by replacing the entire group entity.`, + Updates the details of a group by replacing the entire group entity.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -375,10 +504,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Groups diff --git a/cmd/workspace/groups/overrides.go b/cmd/workspace/groups/overrides.go index 28c91c4d2..db9c7610c 100644 --- a/cmd/workspace/groups/overrides.go +++ b/cmd/workspace/groups/overrides.go @@ -1,10 +1,18 @@ package groups -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListGroupsRequest) { listReq.Attributes = "id,displayName" listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.DisplayName}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/instance-pools/instance-pools.go b/cmd/workspace/instance-pools/instance-pools.go index 80c091e2a..9e7805aef 100755 --- a/cmd/workspace/instance-pools/instance-pools.go +++ b/cmd/workspace/instance-pools/instance-pools.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "instance-pools", - Short: `Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times.`, - Long: `Instance Pools API are used to create, edit, delete and list instance pools by +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "instance-pools", + Short: `Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times.`, + Long: `Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times. @@ -33,52 +38,68 @@ var Cmd = &cobra.Command{ Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does apply. See pricing.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq compute.CreateInstancePool -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *compute.CreateInstancePool, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq compute.CreateInstancePool + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_attributes // TODO: complex arg: azure_attributes // TODO: map via StringToStringVar: custom_tags // TODO: complex arg: disk_spec - createCmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) + cmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) // TODO: complex arg: gcp_attributes - createCmd.Flags().IntVar(&createReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", createReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) - // TODO: complex arg: instance_pool_fleet_attributes - createCmd.Flags().IntVar(&createReq.MaxCapacity, "max-capacity", createReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) - createCmd.Flags().IntVar(&createReq.MinIdleInstances, "min-idle-instances", createReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) + cmd.Flags().IntVar(&createReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", createReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) + cmd.Flags().IntVar(&createReq.MaxCapacity, "max-capacity", createReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) + cmd.Flags().IntVar(&createReq.MinIdleInstances, "min-idle-instances", createReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) // TODO: array: preloaded_docker_images // TODO: array: preloaded_spark_versions -} - -var createCmd = &cobra.Command{ - Use: "create INSTANCE_POOL_NAME NODE_TYPE_ID", - Short: `Create a new instance pool.`, - Long: `Create a new instance pool. + cmd.Use = "create INSTANCE_POOL_NAME NODE_TYPE_ID" + cmd.Short = `Create a new instance pool.` + cmd.Long = `Create a new instance pool. 
- Creates a new instance pool using idle and ready-to-use cloud instances.`, + Creates a new instance pool using idle and ready-to-use cloud instances.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -97,34 +118,55 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq compute.DeleteInstancePool -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *compute.DeleteInstancePool, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq compute.DeleteInstancePool + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete INSTANCE_POOL_ID", - Short: `Delete an instance pool.`, - Long: `Delete an instance pool. + cmd.Use = "delete INSTANCE_POOL_ID" + cmd.Short = `Delete an instance pool.` + cmd.Long = `Delete an instance pool. Deletes the instance pool permanently. The idle instances in the pool are - terminated asynchronously.`, + terminated asynchronously.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -159,53 +201,74 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start edit command -var editReq compute.EditInstancePool -var editJson flags.JsonFlag -func init() { - Cmd.AddCommand(editCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var editOverrides []func( + *cobra.Command, + *compute.EditInstancePool, +) + +func newEdit() *cobra.Command { + cmd := &cobra.Command{} + + var editReq compute.EditInstancePool + var editJson flags.JsonFlag + // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_attributes // TODO: complex arg: azure_attributes // TODO: map via StringToStringVar: custom_tags // TODO: complex arg: disk_spec - editCmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) + cmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) // TODO: complex arg: gcp_attributes - editCmd.Flags().IntVar(&editReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", editReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) - // TODO: complex arg: instance_pool_fleet_attributes - editCmd.Flags().IntVar(&editReq.MaxCapacity, "max-capacity", editReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) - editCmd.Flags().IntVar(&editReq.MinIdleInstances, "min-idle-instances", editReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) + cmd.Flags().IntVar(&editReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", editReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) + cmd.Flags().IntVar(&editReq.MaxCapacity, "max-capacity", editReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) + cmd.Flags().IntVar(&editReq.MinIdleInstances, "min-idle-instances", editReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) // TODO: array: preloaded_docker_images // TODO: array: preloaded_spark_versions -} - -var editCmd = &cobra.Command{ - Use: "edit INSTANCE_POOL_ID INSTANCE_POOL_NAME NODE_TYPE_ID", - Short: `Edit an existing instance pool.`, - Long: `Edit an existing instance pool. + cmd.Use = "edit INSTANCE_POOL_ID INSTANCE_POOL_NAME NODE_TYPE_ID" + cmd.Short = `Edit an existing instance pool.` + cmd.Long = `Edit an existing instance pool. 
- Modifies the configuration of an existing instance pool.`, + Modifies the configuration of an existing instance pool.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -225,31 +288,52 @@ var editCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start get command -var getReq compute.GetInstancePoolRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetInstancePoolRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq compute.GetInstancePoolRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get INSTANCE_POOL_ID", - Short: `Get instance pool information.`, - Long: `Get instance pool information. + cmd.Use = "get INSTANCE_POOL_ID" + cmd.Short = `Get instance pool information.` + cmd.Long = `Get instance pool information. - Retrieve the information for an instance pool based on its identifier.`, + Retrieve the information for an instance pool based on its identifier.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -277,29 +361,194 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionLevelsOverrides []func( + *cobra.Command, + *compute.GetInstancePoolPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq compute.GetInstancePoolPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels INSTANCE_POOL_ID" + cmd.Short = `Get instance pool permission levels.` + cmd.Long = `Get instance pool permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool for which to get or manage permissions") + } + getPermissionLevelsReq.InstancePoolId = args[0] + + response, err := w.InstancePools.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *compute.GetInstancePoolPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq compute.GetInstancePoolPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions INSTANCE_POOL_ID" + cmd.Short = `Get instance pool permissions.` + cmd.Long = `Get instance pool permissions. + + Gets the permissions of an instance pool. Instance pools can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool for which to get or manage permissions") + } + getPermissionsReq.InstancePoolId = args[0] + + response, err := w.InstancePools.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List instance pool info.`, - Long: `List instance pool info. + cmd.Use = "list" + cmd.Short = `List instance pool info.` + cmd.Long = `List instance pool info. - Gets a list of instance pools with their statistics.`, + Gets a list of instance pools with their statistics.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.InstancePools.ListAll(ctx) @@ -307,10 +556,192 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *compute.InstancePoolPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq compute.InstancePoolPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions INSTANCE_POOL_ID" + cmd.Short = `Set instance pool permissions.` + cmd.Long = `Set instance pool permissions. + + Sets permissions on an instance pool. 
Instance pools can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool for which to get or manage permissions") + } + setPermissionsReq.InstancePoolId = args[0] + + response, err := w.InstancePools.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *compute.InstancePoolPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq compute.InstancePoolPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions INSTANCE_POOL_ID" + cmd.Short = `Update instance pool permissions.` + cmd.Long = `Update instance pool permissions. + + Updates the permissions on an instance pool. Instance pools can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool for which to get or manage permissions") + } + updatePermissionsReq.InstancePoolId = args[0] + + response, err := w.InstancePools.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service InstancePools diff --git a/cmd/workspace/instance-pools/overrides.go b/cmd/workspace/instance-pools/overrides.go index 11a76bdd5..f62f8c536 100644 --- a/cmd/workspace/instance-pools/overrides.go +++ b/cmd/workspace/instance-pools/overrides.go @@ -1,9 +1,16 @@ package instance_pools -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.InstancePoolId|green}} {{.InstancePoolName}} {{.NodeTypeId}} {{.State}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/instance-profiles/instance-profiles.go b/cmd/workspace/instance-profiles/instance-profiles.go index ee2d92808..0922a5ae3 100755 --- a/cmd/workspace/instance-profiles/instance-profiles.go +++ b/cmd/workspace/instance-profiles/instance-profiles.go @@ -10,53 +10,75 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "instance-profiles", - Short: `The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.`, - Long: `The Instance Profiles API allows admins to add, list, and remove instance +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "instance-profiles", + Short: `The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.`, + Long: `The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3 buckets] using instance profiles for more information. [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start add command -var addReq compute.AddInstanceProfile -var addJson flags.JsonFlag -func init() { - Cmd.AddCommand(addCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var addOverrides []func( + *cobra.Command, + *compute.AddInstanceProfile, +) + +func newAdd() *cobra.Command { + cmd := &cobra.Command{} + + var addReq compute.AddInstanceProfile + var addJson flags.JsonFlag + // TODO: short flags - addCmd.Flags().Var(&addJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&addJson, "json", `either inline JSON string or @path/to/file.json with request body`) - addCmd.Flags().StringVar(&addReq.IamRoleArn, "iam-role-arn", addReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) - addCmd.Flags().BoolVar(&addReq.IsMetaInstanceProfile, "is-meta-instance-profile", addReq.IsMetaInstanceProfile, `By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile.`) - addCmd.Flags().BoolVar(&addReq.SkipValidation, "skip-validation", addReq.SkipValidation, `By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile.`) + cmd.Flags().StringVar(&addReq.IamRoleArn, "iam-role-arn", addReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) + cmd.Flags().BoolVar(&addReq.IsMetaInstanceProfile, "is-meta-instance-profile", addReq.IsMetaInstanceProfile, `Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios.`) + cmd.Flags().BoolVar(&addReq.SkipValidation, "skip-validation", addReq.SkipValidation, `By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile.`) -} - -var addCmd = &cobra.Command{ - Use: "add INSTANCE_PROFILE_ARN", - Short: `Register an instance profile.`, - Long: `Register an instance profile. + cmd.Use = "add INSTANCE_PROFILE_ARN" + cmd.Short = `Register an instance profile.` + cmd.Long = `Register an instance profile. In the UI, you can select the instance profile when launching clusters. This - API is only available to admin users.`, + API is only available to admin users.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -74,30 +96,50 @@ var addCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range addOverrides { + fn(cmd, &addReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newAdd()) + }) } // start edit command -var editReq compute.InstanceProfile -var editJson flags.JsonFlag -func init() { - Cmd.AddCommand(editCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var editOverrides []func( + *cobra.Command, + *compute.InstanceProfile, +) + +func newEdit() *cobra.Command { + cmd := &cobra.Command{} + + var editReq compute.InstanceProfile + var editJson flags.JsonFlag + // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) - editCmd.Flags().StringVar(&editReq.IamRoleArn, "iam-role-arn", editReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) - editCmd.Flags().BoolVar(&editReq.IsMetaInstanceProfile, "is-meta-instance-profile", editReq.IsMetaInstanceProfile, `By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile.`) + cmd.Flags().StringVar(&editReq.IamRoleArn, "iam-role-arn", editReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) + cmd.Flags().BoolVar(&editReq.IsMetaInstanceProfile, "is-meta-instance-profile", editReq.IsMetaInstanceProfile, `Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios.`) -} - -var editCmd = &cobra.Command{ - Use: "edit INSTANCE_PROFILE_ARN", - Short: `Edit an instance profile.`, - Long: `Edit an instance profile. + cmd.Use = "edit INSTANCE_PROFILE_ARN" + cmd.Short = `Edit an instance profile.` + cmd.Long = `Edit an instance profile. The only supported field to change is the optional IAM role ARN associated with the instance profile. It is required to specify the IAM role ARN if both @@ -113,18 +155,20 @@ var editCmd = &cobra.Command{ This API is only available to admin users. [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html - [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html`, + [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -142,31 +186,49 @@ var editCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List available instance profiles.`, - Long: `List available instance profiles. + cmd.Use = "list" + cmd.Short = `List available instance profiles.` + cmd.Long = `List available instance profiles. List the instance profiles that the calling user can use to launch a cluster. - This API is available to all users.`, + This API is available to all users.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.InstanceProfiles.ListAll(ctx) @@ -174,43 +236,65 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start remove command -var removeReq compute.RemoveInstanceProfile -var removeJson flags.JsonFlag -func init() { - Cmd.AddCommand(removeCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var removeOverrides []func( + *cobra.Command, + *compute.RemoveInstanceProfile, +) + +func newRemove() *cobra.Command { + cmd := &cobra.Command{} + + var removeReq compute.RemoveInstanceProfile + var removeJson flags.JsonFlag + // TODO: short flags - removeCmd.Flags().Var(&removeJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&removeJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var removeCmd = &cobra.Command{ - Use: "remove INSTANCE_PROFILE_ARN", - Short: `Remove the instance profile.`, - Long: `Remove the instance profile. + cmd.Use = "remove INSTANCE_PROFILE_ARN" + cmd.Short = `Remove the instance profile.` + cmd.Long = `Remove the instance profile. Remove the instance profile with the provided ARN. Existing clusters with this instance profile will continue to function. 
- This API is only accessible to admin users.`, + This API is only accessible to admin users.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -228,10 +312,24 @@ var removeCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range removeOverrides { + fn(cmd, &removeReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRemove()) + }) } // end service InstanceProfiles diff --git a/cmd/workspace/instance-profiles/overrides.go b/cmd/workspace/instance-profiles/overrides.go index 3b5cbd1c6..adf060528 100644 --- a/cmd/workspace/instance-profiles/overrides.go +++ b/cmd/workspace/instance-profiles/overrides.go @@ -1,9 +1,16 @@ package instance_profiles -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.InstanceProfileArn}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/ip-access-lists/ip-access-lists.go b/cmd/workspace/ip-access-lists/ip-access-lists.go index 94bd110d0..7bda0ef02 100755 --- a/cmd/workspace/ip-access-lists/ip-access-lists.go +++ b/cmd/workspace/ip-access-lists/ip-access-lists.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "ip-access-lists", - Short: `IP Access List enables admins to configure IP access lists.`, - Long: `IP Access List enables admins to configure IP access lists. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "ip-access-lists", + Short: `IP Access List enables admins to configure IP access lists.`, + Long: `IP Access List enables admins to configure IP access lists. IP access lists affect web application access and REST API access to this workspace only. If the feature is disabled for a workspace, all access is @@ -36,26 +41,41 @@ var Cmd = &cobra.Command{ After changes to the IP access list feature, it can take a few minutes for changes to take effect.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq settings.CreateIpAccessList -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *settings.CreateIpAccessList, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq settings.CreateIpAccessList + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create access list.`, - Long: `Create access list. + cmd.Use = "create" + cmd.Short = `Create access list.` + cmd.Long = `Create access list. Creates an IP access list for this workspace. @@ -72,11 +92,12 @@ var createCmd = &cobra.Command{ It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no effect until you enable the feature. See - :method:workspaceconf/setStatus`, + :method:workspaceconf/setStatus` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -94,31 +115,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq settings.DeleteIpAccessListRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.DeleteIpAccessListRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteIpAccessListRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete IP_ACCESS_LIST_ID", - Short: `Delete access list.`, - Long: `Delete access list. + cmd.Use = "delete IP_ACCESS_LIST_ID" + cmd.Short = `Delete access list.` + cmd.Long = `Delete access list. 
- Deletes an IP access list, specified by its list ID.`, + Deletes an IP access list, specified by its list ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -146,31 +188,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq settings.GetIpAccessListRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *settings.GetIpAccessListRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq settings.GetIpAccessListRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get IP_ACCESS_LIST_ID", - Short: `Get access list.`, - Long: `Get access list. + cmd.Use = "get IP_ACCESS_LIST_ID" + cmd.Short = `Get access list.` + cmd.Long = `Get access list. - Gets an IP access list, specified by its list ID.`, + Gets an IP access list, specified by its list ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -198,29 +261,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get access lists.`, - Long: `Get access lists. + cmd.Use = "list" + cmd.Short = `Get access lists.` + cmd.Long = `Get access lists. 
- Gets all IP access lists for the specified workspace.`, + Gets all IP access lists for the specified workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.IpAccessLists.ListAll(ctx) @@ -228,29 +309,49 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start replace command -var replaceReq settings.ReplaceIpAccessList -var replaceJson flags.JsonFlag -func init() { - Cmd.AddCommand(replaceCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var replaceOverrides []func( + *cobra.Command, + *settings.ReplaceIpAccessList, +) + +func newReplace() *cobra.Command { + cmd := &cobra.Command{} + + var replaceReq settings.ReplaceIpAccessList + var replaceJson flags.JsonFlag + // TODO: short flags - replaceCmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) - replaceCmd.Flags().StringVar(&replaceReq.ListId, "list-id", replaceReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) + cmd.Flags().StringVar(&replaceReq.ListId, "list-id", replaceReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) -} - -var replaceCmd = &cobra.Command{ - Use: "replace", - Short: `Replace access list.`, - Long: `Replace access list. + cmd.Use = "replace" + cmd.Short = `Replace access list.` + cmd.Long = `Replace access list. Replaces an IP access list, specified by its ID. @@ -263,11 +364,12 @@ var replaceCmd = &cobra.Command{ calling user's current IP, error 400 is returned with error_code value INVALID_STATE. It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the - feature. See :method:workspaceconf/setStatus.`, + feature. See :method:workspaceconf/setStatus.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -285,29 +387,49 @@ var replaceCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range replaceOverrides { + fn(cmd, &replaceReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReplace()) + }) } // start update command -var updateReq settings.UpdateIpAccessList -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *settings.UpdateIpAccessList, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq settings.UpdateIpAccessList + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.ListId, "list-id", updateReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) + cmd.Flags().StringVar(&updateReq.ListId, "list-id", updateReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Update access list.`, - Long: `Update access list. + cmd.Use = "update" + cmd.Short = `Update access list.` + cmd.Long = `Update access list. Updates an existing IP access list, specified by its ID. @@ -324,11 +446,12 @@ var updateCmd = &cobra.Command{ It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. See - :method:workspaceconf/setStatus.`, + :method:workspaceconf/setStatus.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -346,10 +469,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service IpAccessLists diff --git a/cmd/workspace/ip-access-lists/overrides.go b/cmd/workspace/ip-access-lists/overrides.go index abea3032f..ab4db1ec6 100644 --- a/cmd/workspace/ip-access-lists/overrides.go +++ b/cmd/workspace/ip-access-lists/overrides.go @@ -1,10 +1,17 @@ package ip_access_lists -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { // this command still has no Web UI listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.ListId|green}} {{.Label}} {{join .IpAddresses ","}} {{if eq .ListType "ALLOW"}}{{"ALLOW"|green}}{{else}}{{"BLOCK"|red}}{{end}} {{if .Enabled}}{{"ENABLED"|green}}{{else}}{{"DISABLED"|red}}{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/jobs/jobs.go b/cmd/workspace/jobs/jobs.go index 1dfb065b1..7670ebb77 100755 --- a/cmd/workspace/jobs/jobs.go +++ b/cmd/workspace/jobs/jobs.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "jobs", - Short: `The Jobs API allows you to create, edit, and delete jobs.`, - Long: `The Jobs API allows you to create, edit, and delete jobs. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "jobs", + Short: `The Jobs API allows you to create, edit, and delete jobs.`, + Long: `The Jobs API allows you to create, edit, and delete jobs. You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with scalable resources. Your job can consist of a single @@ -34,33 +39,49 @@ var Cmd = &cobra.Command{ [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets`, - Annotations: map[string]string{ - "package": "jobs", - }, + GroupID: "jobs", + Annotations: map[string]string{ + "package": "jobs", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start cancel-all-runs command -var cancelAllRunsReq jobs.CancelAllRuns -var cancelAllRunsJson flags.JsonFlag -func init() { - Cmd.AddCommand(cancelAllRunsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cancelAllRunsOverrides []func( + *cobra.Command, + *jobs.CancelAllRuns, +) + +func newCancelAllRuns() *cobra.Command { + cmd := &cobra.Command{} + + var cancelAllRunsReq jobs.CancelAllRuns + var cancelAllRunsJson flags.JsonFlag + // TODO: short flags - cancelAllRunsCmd.Flags().Var(&cancelAllRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&cancelAllRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var cancelAllRunsCmd = &cobra.Command{ - Use: "cancel-all-runs JOB_ID", - Short: `Cancel all runs of a job.`, - Long: `Cancel all runs of a job. + cmd.Use = "cancel-all-runs JOB_ID" + cmd.Short = `Cancel all runs of a job.` + cmd.Long = `Cancel all runs of a job. Cancels all active runs of a job. The runs are canceled asynchronously, so it - doesn't prevent new runs from being started.`, + doesn't prevent new runs from being started.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -98,40 +119,60 @@ var cancelAllRunsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range cancelAllRunsOverrides { + fn(cmd, &cancelAllRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCancelAllRuns()) + }) } // start cancel-run command -var cancelRunReq jobs.CancelRun -var cancelRunJson flags.JsonFlag -var cancelRunSkipWait bool -var cancelRunTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cancelRunOverrides []func( + *cobra.Command, + *jobs.CancelRun, +) -func init() { - Cmd.AddCommand(cancelRunCmd) +func newCancelRun() *cobra.Command { + cmd := &cobra.Command{} - cancelRunCmd.Flags().BoolVar(&cancelRunSkipWait, "no-wait", cancelRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - cancelRunCmd.Flags().DurationVar(&cancelRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var cancelRunReq jobs.CancelRun + var cancelRunJson flags.JsonFlag + + var cancelRunSkipWait bool + var cancelRunTimeout time.Duration + + cmd.Flags().BoolVar(&cancelRunSkipWait, "no-wait", cancelRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&cancelRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - cancelRunCmd.Flags().Var(&cancelRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&cancelRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var cancelRunCmd = &cobra.Command{ - Use: "cancel-run RUN_ID", - Short: `Cancel a job run.`, - Long: `Cancel a job run. + cmd.Use = "cancel-run RUN_ID" + cmd.Short = `Cancel a job run.` + cmd.Long = `Cancel a job run. Cancels a job run. 
The run is canceled asynchronously, so it may still be - running when this request completes.`, + running when this request completes.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -188,59 +229,62 @@ var cancelRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range cancelRunOverrides { + fn(cmd, &cancelRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCancelRun()) + }) } // start create command -var createReq jobs.CreateJob -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *jobs.CreateJob, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq jobs.CreateJob + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: access_control_list - // TODO: array: compute - // TODO: complex arg: continuous - // TODO: complex arg: email_notifications - createCmd.Flags().Var(&createReq.Format, "format", `Used to tell what is the format of the job.`) - // TODO: complex arg: git_source - // TODO: array: job_clusters - createCmd.Flags().IntVar(&createReq.MaxConcurrentRuns, "max-concurrent-runs", createReq.MaxConcurrentRuns, `An optional maximum allowed number of concurrent runs of the job.`) - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `An optional name for the job.`) - // TODO: complex arg: notification_settings - // TODO: array: parameters - // TODO: complex arg: run_as - // TODO: complex arg: schedule - // TODO: map via StringToStringVar: tags - // TODO: array: tasks - createCmd.Flags().IntVar(&createReq.TimeoutSeconds, "timeout-seconds", createReq.TimeoutSeconds, `An optional timeout applied to each run of this job.`) - // TODO: complex arg: trigger - // TODO: complex arg: webhook_notifications - -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new job.`, - Long: `Create a new job. + cmd.Use = "create" + cmd.Short = `Create a new job.` + cmd.Long = `Create a new job. 
- Create a new job.`, + Create a new job.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -250,6 +294,7 @@ var createCmd = &cobra.Command{ return err } } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.Jobs.Create(ctx, createReq) @@ -257,33 +302,54 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq jobs.DeleteJob -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *jobs.DeleteJob, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq jobs.DeleteJob + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete JOB_ID", - Short: `Delete a job.`, - Long: `Delete a job. + cmd.Use = "delete JOB_ID" + cmd.Short = `Delete a job.` + cmd.Long = `Delete a job. - Deletes a job.`, + Deletes a job.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -321,33 +387,54 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start delete-run command -var deleteRunReq jobs.DeleteRun -var deleteRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteRunCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteRunOverrides []func( + *cobra.Command, + *jobs.DeleteRun, +) + +func newDeleteRun() *cobra.Command { + cmd := &cobra.Command{} + + var deleteRunReq jobs.DeleteRun + var deleteRunJson flags.JsonFlag + // TODO: short flags - deleteRunCmd.Flags().Var(&deleteRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteRunCmd = &cobra.Command{ - Use: "delete-run RUN_ID", - Short: `Delete a job run.`, - Long: `Delete a job run. + cmd.Use = "delete-run RUN_ID" + cmd.Short = `Delete a job run.` + cmd.Long = `Delete a job run. - Deletes a non-active run. Returns an error if the run is active.`, + Deletes a non-active run. Returns an error if the run is active.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -385,33 +472,54 @@ var deleteRunCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteRunOverrides { + fn(cmd, &deleteRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteRun()) + }) } // start export-run command -var exportRunReq jobs.ExportRunRequest -func init() { - Cmd.AddCommand(exportRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var exportRunOverrides []func( + *cobra.Command, + *jobs.ExportRunRequest, +) + +func newExportRun() *cobra.Command { + cmd := &cobra.Command{} + + var exportRunReq jobs.ExportRunRequest + // TODO: short flags - exportRunCmd.Flags().Var(&exportRunReq.ViewsToExport, "views-to-export", `Which views to export (CODE, DASHBOARDS, or ALL).`) + cmd.Flags().Var(&exportRunReq.ViewsToExport, "views-to-export", `Which views to export (CODE, DASHBOARDS, or ALL).`) -} - -var exportRunCmd = &cobra.Command{ - Use: "export-run RUN_ID", - Short: `Export and retrieve a job run.`, - Long: `Export and retrieve a job run. + cmd.Use = "export-run RUN_ID" + cmd.Short = `Export and retrieve a job run.` + cmd.Long = `Export and retrieve a job run. - Export and retrieve the job run task.`, + Export and retrieve the job run task.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -442,31 +550,52 @@ var exportRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range exportRunOverrides { + fn(cmd, &exportRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newExportRun()) + }) } // start get command -var getReq jobs.GetJobRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *jobs.GetJobRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq jobs.GetJobRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get JOB_ID", - Short: `Get a single job.`, - Long: `Get a single job. + cmd.Use = "get JOB_ID" + cmd.Short = `Get a single job.` + cmd.Long = `Get a single job. - Retrieves the details for a single job.`, + Retrieves the details for a single job.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -497,39 +626,206 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *jobs.GetJobPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq jobs.GetJobPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels JOB_ID" + cmd.Short = `Get job permission levels.` + cmd.Long = `Get job permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The job for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the job for which to get or manage permissions") + } + getPermissionLevelsReq.JobId = args[0] + + response, err := w.Jobs.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *jobs.GetJobPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq jobs.GetJobPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions JOB_ID" + cmd.Short = `Get job permissions.` + cmd.Long = `Get job permissions. + + Gets the permissions of a job. Jobs can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The job for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the job for which to get or manage permissions") + } + getPermissionsReq.JobId = args[0] + + response, err := w.Jobs.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start get-run command -var getRunReq jobs.GetRunRequest -var getRunSkipWait bool -var getRunTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getRunOverrides []func( + *cobra.Command, + *jobs.GetRunRequest, +) -func init() { - Cmd.AddCommand(getRunCmd) +func newGetRun() *cobra.Command { + cmd := &cobra.Command{} - getRunCmd.Flags().BoolVar(&getRunSkipWait, "no-wait", getRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - getRunCmd.Flags().DurationVar(&getRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var getRunReq jobs.GetRunRequest + + var getRunSkipWait bool + var getRunTimeout time.Duration + + cmd.Flags().BoolVar(&getRunSkipWait, "no-wait", getRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&getRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - getRunCmd.Flags().BoolVar(&getRunReq.IncludeHistory, "include-history", getRunReq.IncludeHistory, `Whether to include the repair history in the response.`) + cmd.Flags().BoolVar(&getRunReq.IncludeHistory, "include-history", getRunReq.IncludeHistory, `Whether to include the repair history in the response.`) -} - -var getRunCmd = &cobra.Command{ - Use: "get-run RUN_ID", - Short: `Get a single job run.`, - Long: `Get a single job run. + cmd.Use = "get-run RUN_ID" + cmd.Short = `Get a single job run.` + cmd.Long = `Get a single job run. - Retrieve the metadata of a run.`, + Retrieve the metadata of a run.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -560,25 +856,45 @@ var getRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getRunOverrides { + fn(cmd, &getRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetRun()) + }) } // start get-run-output command -var getRunOutputReq jobs.GetRunOutputRequest -func init() { - Cmd.AddCommand(getRunOutputCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getRunOutputOverrides []func( + *cobra.Command, + *jobs.GetRunOutputRequest, +) + +func newGetRunOutput() *cobra.Command { + cmd := &cobra.Command{} + + var getRunOutputReq jobs.GetRunOutputRequest + // TODO: short flags -} - -var getRunOutputCmd = &cobra.Command{ - Use: "get-run-output RUN_ID", - Short: `Get the output for a single run.`, - Long: `Get the output for a single run. + cmd.Use = "get-run-output RUN_ID" + cmd.Short = `Get the output for a single run.` + cmd.Long = `Get the output for a single run. Retrieve the output and metadata of a single task run. When a notebook task returns a value through the dbutils.notebook.exit() call, you can use this @@ -589,11 +905,12 @@ var getRunOutputCmd = &cobra.Command{ This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. 
If you to want to reference them beyond - 60 days, you must save old run results before they expire.`, + 60 days, you must save old run results before they expire.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -624,46 +941,68 @@ var getRunOutputCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getRunOutputOverrides { + fn(cmd, &getRunOutputReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetRunOutput()) + }) } // start list command -var listReq jobs.ListJobsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *jobs.ListJobsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq jobs.ListJobsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().BoolVar(&listReq.ExpandTasks, "expand-tasks", listReq.ExpandTasks, `Whether to include task and cluster details in the response.`) - listCmd.Flags().IntVar(&listReq.Limit, "limit", listReq.Limit, `The number of jobs to return.`) - listCmd.Flags().StringVar(&listReq.Name, "name", listReq.Name, `A filter on the list based on the exact (case insensitive) job name.`) - listCmd.Flags().IntVar(&listReq.Offset, "offset", listReq.Offset, `The offset of the first job to return, relative to the most recently created job.`) - listCmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of jobs respectively.`) + cmd.Flags().BoolVar(&listReq.ExpandTasks, "expand-tasks", listReq.ExpandTasks, `Whether to include task and cluster details in the response.`) + cmd.Flags().IntVar(&listReq.Limit, "limit", listReq.Limit, `The number of jobs to return.`) + cmd.Flags().StringVar(&listReq.Name, "name", listReq.Name, `A filter on the list based on the exact (case insensitive) job name.`) + cmd.Flags().IntVar(&listReq.Offset, "offset", listReq.Offset, `The offset of the first job to return, relative to the most recently created job.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of jobs respectively.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List jobs.`, - Long: `List jobs. + cmd.Use = "list" + cmd.Short = `List jobs.` + cmd.Long = `List jobs. 
- Retrieves a list of jobs.`, + Retrieves a list of jobs.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -680,51 +1019,73 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start list-runs command -var listRunsReq jobs.ListRunsRequest -var listRunsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listRunsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listRunsOverrides []func( + *cobra.Command, + *jobs.ListRunsRequest, +) + +func newListRuns() *cobra.Command { + cmd := &cobra.Command{} + + var listRunsReq jobs.ListRunsRequest + var listRunsJson flags.JsonFlag + // TODO: short flags - listRunsCmd.Flags().Var(&listRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listRunsCmd.Flags().BoolVar(&listRunsReq.ActiveOnly, "active-only", listRunsReq.ActiveOnly, `If active_only is true, only active runs are included in the results; otherwise, lists both active and completed runs.`) - listRunsCmd.Flags().BoolVar(&listRunsReq.CompletedOnly, "completed-only", listRunsReq.CompletedOnly, `If completed_only is true, only completed runs are included in the results; otherwise, lists both active and completed runs.`) - listRunsCmd.Flags().BoolVar(&listRunsReq.ExpandTasks, "expand-tasks", listRunsReq.ExpandTasks, `Whether to include task and cluster details in the response.`) - listRunsCmd.Flags().Int64Var(&listRunsReq.JobId, "job-id", listRunsReq.JobId, `The job for which to list runs.`) - listRunsCmd.Flags().IntVar(&listRunsReq.Limit, "limit", listRunsReq.Limit, `The number of runs to return.`) - listRunsCmd.Flags().IntVar(&listRunsReq.Offset, "offset", listRunsReq.Offset, `The offset of the first run to return, relative to the most recent run.`) - listRunsCmd.Flags().StringVar(&listRunsReq.PageToken, "page-token", listRunsReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of runs respectively.`) - listRunsCmd.Flags().Var(&listRunsReq.RunType, "run-type", `The type of runs to return.`) - listRunsCmd.Flags().IntVar(&listRunsReq.StartTimeFrom, "start-time-from", listRunsReq.StartTimeFrom, `Show runs that started _at or after_ this value.`) - listRunsCmd.Flags().IntVar(&listRunsReq.StartTimeTo, "start-time-to", listRunsReq.StartTimeTo, `Show runs 
that started _at or before_ this value.`) + cmd.Flags().BoolVar(&listRunsReq.ActiveOnly, "active-only", listRunsReq.ActiveOnly, `If active_only is true, only active runs are included in the results; otherwise, lists both active and completed runs.`) + cmd.Flags().BoolVar(&listRunsReq.CompletedOnly, "completed-only", listRunsReq.CompletedOnly, `If completed_only is true, only completed runs are included in the results; otherwise, lists both active and completed runs.`) + cmd.Flags().BoolVar(&listRunsReq.ExpandTasks, "expand-tasks", listRunsReq.ExpandTasks, `Whether to include task and cluster details in the response.`) + cmd.Flags().Int64Var(&listRunsReq.JobId, "job-id", listRunsReq.JobId, `The job for which to list runs.`) + cmd.Flags().IntVar(&listRunsReq.Limit, "limit", listRunsReq.Limit, `The number of runs to return.`) + cmd.Flags().IntVar(&listRunsReq.Offset, "offset", listRunsReq.Offset, `The offset of the first run to return, relative to the most recent run.`) + cmd.Flags().StringVar(&listRunsReq.PageToken, "page-token", listRunsReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of runs respectively.`) + cmd.Flags().Var(&listRunsReq.RunType, "run-type", `The type of runs to return.`) + cmd.Flags().IntVar(&listRunsReq.StartTimeFrom, "start-time-from", listRunsReq.StartTimeFrom, `Show runs that started _at or after_ this value.`) + cmd.Flags().IntVar(&listRunsReq.StartTimeTo, "start-time-to", listRunsReq.StartTimeTo, `Show runs that started _at or before_ this value.`) -} - -var listRunsCmd = &cobra.Command{ - Use: "list-runs", - Short: `List job runs.`, - Long: `List job runs. + cmd.Use = "list-runs" + cmd.Short = `List job runs.` + cmd.Long = `List job runs. - List runs in descending order by start time.`, + List runs in descending order by start time.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -741,54 +1102,74 @@ var listRunsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listRunsOverrides { + fn(cmd, &listRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListRuns()) + }) } // start repair-run command -var repairRunReq jobs.RepairRun -var repairRunJson flags.JsonFlag -var repairRunSkipWait bool -var repairRunTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var repairRunOverrides []func( + *cobra.Command, + *jobs.RepairRun, +) -func init() { - Cmd.AddCommand(repairRunCmd) +func newRepairRun() *cobra.Command { + cmd := &cobra.Command{} - repairRunCmd.Flags().BoolVar(&repairRunSkipWait, "no-wait", repairRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - repairRunCmd.Flags().DurationVar(&repairRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var repairRunReq jobs.RepairRun + var repairRunJson flags.JsonFlag + + var repairRunSkipWait bool + var repairRunTimeout time.Duration + + cmd.Flags().BoolVar(&repairRunSkipWait, "no-wait", repairRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&repairRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - repairRunCmd.Flags().Var(&repairRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&repairRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: dbt_commands // TODO: array: jar_params - repairRunCmd.Flags().Int64Var(&repairRunReq.LatestRepairId, "latest-repair-id", repairRunReq.LatestRepairId, `The ID of the latest repair.`) + cmd.Flags().Int64Var(&repairRunReq.LatestRepairId, "latest-repair-id", repairRunReq.LatestRepairId, `The ID of the latest repair.`) // TODO: map via StringToStringVar: notebook_params // TODO: complex arg: pipeline_params // TODO: map via StringToStringVar: python_named_params // TODO: array: python_params - repairRunCmd.Flags().BoolVar(&repairRunReq.RerunAllFailedTasks, "rerun-all-failed-tasks", repairRunReq.RerunAllFailedTasks, `If true, repair all failed tasks.`) - repairRunCmd.Flags().BoolVar(&repairRunReq.RerunDependentTasks, "rerun-dependent-tasks", repairRunReq.RerunDependentTasks, `If true, repair all tasks that depend on the tasks in rerun_tasks, even if they were previously successful.`) + cmd.Flags().BoolVar(&repairRunReq.RerunAllFailedTasks, "rerun-all-failed-tasks", repairRunReq.RerunAllFailedTasks, `If true, repair all failed tasks.`) + cmd.Flags().BoolVar(&repairRunReq.RerunDependentTasks, "rerun-dependent-tasks", repairRunReq.RerunDependentTasks, `If true, repair all tasks that depend on the tasks in rerun_tasks, even if they were previously successful.`) // TODO: array: rerun_tasks // TODO: array: spark_submit_params // TODO: map via StringToStringVar: sql_params -} - -var repairRunCmd = &cobra.Command{ - Use: "repair-run RUN_ID", - Short: `Repair a job run.`, - Long: `Repair a job run. + cmd.Use = "repair-run RUN_ID" + cmd.Short = `Repair a job run.` + cmd.Long = `Repair a job run. Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job and task settings, and can be viewed in the history - for the original job run.`, + for the original job run.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -845,34 +1226,55 @@ var repairRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range repairRunOverrides { + fn(cmd, &repairRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRepairRun()) + }) } // start reset command -var resetReq jobs.ResetJob -var resetJson flags.JsonFlag -func init() { - Cmd.AddCommand(resetCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resetOverrides []func( + *cobra.Command, + *jobs.ResetJob, +) + +func newReset() *cobra.Command { + cmd := &cobra.Command{} + + var resetReq jobs.ResetJob + var resetJson flags.JsonFlag + // TODO: short flags - resetCmd.Flags().Var(&resetJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&resetJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var resetCmd = &cobra.Command{ - Use: "reset", - Short: `Overwrites all settings for a job.`, - Long: `Overwrites all settings for a job. + cmd.Use = "reset" + cmd.Short = `Overwrites all settings for a job.` + cmd.Long = `Overwrites all settings for a job. Overwrites all the settings for a specific job. Use the Update endpoint to - update job settings partially.`, + update job settings partially.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -890,29 +1292,51 @@ var resetCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resetOverrides { + fn(cmd, &resetReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReset()) + }) } // start run-now command -var runNowReq jobs.RunNow -var runNowJson flags.JsonFlag -var runNowSkipWait bool -var runNowTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var runNowOverrides []func( + *cobra.Command, + *jobs.RunNow, +) -func init() { - Cmd.AddCommand(runNowCmd) +func newRunNow() *cobra.Command { + cmd := &cobra.Command{} - runNowCmd.Flags().BoolVar(&runNowSkipWait, "no-wait", runNowSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - runNowCmd.Flags().DurationVar(&runNowTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var runNowReq jobs.RunNow + var runNowJson flags.JsonFlag + + var runNowSkipWait bool + var runNowTimeout time.Duration + + cmd.Flags().BoolVar(&runNowSkipWait, "no-wait", runNowSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&runNowTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - runNowCmd.Flags().Var(&runNowJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&runNowJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: dbt_commands - runNowCmd.Flags().StringVar(&runNowReq.IdempotencyToken, "idempotency-token", runNowReq.IdempotencyToken, `An optional token to guarantee the idempotency of job run requests.`) + cmd.Flags().StringVar(&runNowReq.IdempotencyToken, "idempotency-token", runNowReq.IdempotencyToken, `An optional token to guarantee the idempotency of job run requests.`) // TODO: array: jar_params // TODO: array: job_parameters // TODO: map via StringToStringVar: notebook_params @@ -922,18 +1346,16 @@ func init() { // TODO: array: spark_submit_params // TODO: map via StringToStringVar: sql_params -} - -var runNowCmd = &cobra.Command{ - Use: "run-now JOB_ID", - Short: `Trigger a new job run.`, - Long: `Trigger a new job run. + cmd.Use = "run-now JOB_ID" + cmd.Short = `Trigger a new job run.` + cmd.Long = `Trigger a new job run. - Run a job and return the run_id of the triggered run.`, + Run a job and return the run_id of the triggered run.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -990,58 +1412,165 @@ var runNowCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range runNowOverrides { + fn(cmd, &runNowReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRunNow()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setPermissionsOverrides []func( + *cobra.Command, + *jobs.JobPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq jobs.JobPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions JOB_ID" + cmd.Short = `Set job permissions.` + cmd.Long = `Set job permissions. + + Sets permissions on a job. Jobs can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The job for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the job for which to get or manage permissions") + } + setPermissionsReq.JobId = args[0] + + response, err := w.Jobs.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start submit command -var submitReq jobs.SubmitRun -var submitJson flags.JsonFlag -var submitSkipWait bool -var submitTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var submitOverrides []func( + *cobra.Command, + *jobs.SubmitRun, +) -func init() { - Cmd.AddCommand(submitCmd) +func newSubmit() *cobra.Command { + cmd := &cobra.Command{} - submitCmd.Flags().BoolVar(&submitSkipWait, "no-wait", submitSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - submitCmd.Flags().DurationVar(&submitTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var submitReq jobs.SubmitRun + var submitJson flags.JsonFlag + + var submitSkipWait bool + var submitTimeout time.Duration + + cmd.Flags().BoolVar(&submitSkipWait, "no-wait", submitSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&submitTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - submitCmd.Flags().Var(&submitJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&submitJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: access_control_list + // TODO: complex arg: email_notifications // TODO: complex arg: git_source - submitCmd.Flags().StringVar(&submitReq.IdempotencyToken, "idempotency-token", submitReq.IdempotencyToken, `An optional token that can be used to guarantee the idempotency of job run requests.`) + // TODO: complex arg: health + cmd.Flags().StringVar(&submitReq.IdempotencyToken, "idempotency-token", submitReq.IdempotencyToken, `An optional token that can be used to guarantee the idempotency of job run requests.`) // TODO: complex arg: notification_settings - submitCmd.Flags().StringVar(&submitReq.RunName, "run-name", submitReq.RunName, `An optional name for the run.`) + cmd.Flags().StringVar(&submitReq.RunName, "run-name", submitReq.RunName, `An optional name for the run.`) // TODO: array: tasks - submitCmd.Flags().IntVar(&submitReq.TimeoutSeconds, "timeout-seconds", submitReq.TimeoutSeconds, `An optional timeout applied to each run of this job.`) + cmd.Flags().IntVar(&submitReq.TimeoutSeconds, "timeout-seconds", submitReq.TimeoutSeconds, `An optional timeout applied to each run of this job.`) // TODO: complex arg: webhook_notifications -} - -var submitCmd = &cobra.Command{ - Use: "submit", - Short: `Create and trigger a one-time run.`, - Long: `Create and trigger a one-time run. + cmd.Use = "submit" + cmd.Short = `Create and trigger a one-time run.` + cmd.Long = `Create and trigger a one-time run. Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. Runs submitted using this endpoint don’t display in the UI. Use the jobs/runs/get API to check the run state after the job is - submitted.`, + submitted.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1077,37 +1606,58 @@ var submitCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range submitOverrides { + fn(cmd, &submitReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSubmit()) + }) } // start update command -var updateReq jobs.UpdateJob -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *jobs.UpdateJob, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq jobs.UpdateJob + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: fields_to_remove // TODO: complex arg: new_settings -} - -var updateCmd = &cobra.Command{ - Use: "update JOB_ID", - Short: `Partially update a job.`, - Long: `Partially update a job. + cmd.Use = "update JOB_ID" + cmd.Short = `Partially update a job.` + cmd.Long = `Partially update a job. Add, update, or remove specific settings of an existing job. Use the ResetJob - to overwrite all job settings.`, + to overwrite all job settings.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1145,10 +1695,108 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *jobs.JobPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq jobs.JobPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions JOB_ID" + cmd.Short = `Update job permissions.` + cmd.Long = `Update job permissions. + + Updates the permissions on a job. 
Jobs can inherit permissions from their root
+ object.`
+
+ cmd.Annotations = make(map[string]string)
+
+ cmd.PreRunE = root.MustWorkspaceClient
+ cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+ ctx := cmd.Context()
+ w := root.WorkspaceClient(ctx)
+
+ if cmd.Flags().Changed("json") {
+ err = updatePermissionsJson.Unmarshal(&updatePermissionsReq)
+ if err != nil {
+ return err
+ }
+ }
+ if len(args) == 0 {
+ promptSpinner := cmdio.Spinner(ctx)
+ promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down."
+ names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{})
+ close(promptSpinner)
+ if err != nil {
+ return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err)
+ }
+ id, err := cmdio.Select(ctx, names, "The job for which to get or manage permissions")
+ if err != nil {
+ return err
+ }
+ args = append(args, id)
+ }
+ if len(args) != 1 {
+ return fmt.Errorf("expected to have the job for which to get or manage permissions")
+ }
+ updatePermissionsReq.JobId = args[0]
+
+ response, err := w.Jobs.UpdatePermissions(ctx, updatePermissionsReq)
+ if err != nil {
+ return err
+ }
+ return cmdio.Render(ctx, response)
+ }
+
+ // Disable completions since they are not applicable.
+ // Can be overridden by manual implementation in `override.go`.
+ cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+ // Apply optional overrides to this command.
+ for _, fn := range updatePermissionsOverrides {
+ fn(cmd, &updatePermissionsReq)
+ }
+
+ return cmd
+}
+
+func init() {
+ cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
+ cmd.AddCommand(newUpdatePermissions())
+ })
 }
 // end service Jobs
diff --git a/cmd/workspace/jobs/overrides.go b/cmd/workspace/jobs/overrides.go
index 93512c84a..fd22dcbdb 100644
--- a/cmd/workspace/jobs/overrides.go
+++ b/cmd/workspace/jobs/overrides.go
@@ -1,14 +1,25 @@
 package jobs
-import "github.com/databricks/cli/libs/cmdio"
+import (
+ "github.com/databricks/cli/libs/cmdio"
+ "github.com/databricks/databricks-sdk-go/service/jobs"
+ "github.com/spf13/cobra"
+)
-func init() {
+func listOverride(listCmd *cobra.Command, listReq *jobs.ListJobsRequest) {
 listCmd.Annotations["template"] = cmdio.Heredoc(`
 {{range .}}{{green "%d" .JobId}} {{.Settings.Name}}
 {{end}}`)
+}
+func listRunsOverride(listRunsCmd *cobra.Command, listRunsReq *jobs.ListRunsRequest) {
 listRunsCmd.Annotations["template"] = cmdio.Heredoc(`
 {{header "Job ID"}} {{header "Run ID"}} {{header "Result State"}} URL
 {{range .}}{{green "%d" .JobId}} {{cyan "%d" .RunId}} {{if eq .State.ResultState "SUCCESS"}}{{"SUCCESS"|green}}{{else}}{{red "%s" .State.ResultState}}{{end}} {{.RunPageUrl}}
 {{end}}`)
 }
+
+func init() {
+ listOverrides = append(listOverrides, listOverride)
+ listRunsOverrides = append(listRunsOverrides, listRunsOverride)
+}
diff --git a/cmd/workspace/libraries/libraries.go b/cmd/workspace/libraries/libraries.go
index 70b5584ab..92671dc3e 100755
--- a/cmd/workspace/libraries/libraries.go
+++ b/cmd/workspace/libraries/libraries.go
@@ -12,10 +12,15 @@ import (
 "github.com/spf13/cobra"
 )
-var Cmd = &cobra.Command{
- Use: "libraries",
- Short: `The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.`,
- Long: `The Libraries API allows you to install and uninstall libraries and get the
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "libraries", + Short: `The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.`, + Long: `The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster. To make third-party or custom code available to notebooks and jobs running on @@ -35,30 +40,43 @@ var Cmd = &cobra.Command{ When you uninstall a library from a cluster, the library is removed only when you restart the cluster. Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start all-cluster-statuses command -func init() { - Cmd.AddCommand(allClusterStatusesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var allClusterStatusesOverrides []func( + *cobra.Command, +) -} +func newAllClusterStatuses() *cobra.Command { + cmd := &cobra.Command{} -var allClusterStatusesCmd = &cobra.Command{ - Use: "all-cluster-statuses", - Short: `Get all statuses.`, - Long: `Get all statuses. + cmd.Use = "all-cluster-statuses" + cmd.Short = `Get all statuses.` + cmd.Long = `Get all statuses. Get the status of all libraries on all clusters. A status will be available for all libraries installed on this cluster via the API or the libraries UI as - well as libraries set to be installed on all clusters via the libraries UI.`, + well as libraries set to be installed on all clusters via the libraries UI.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Libraries.AllClusterStatuses(ctx) @@ -66,25 +84,45 @@ var allClusterStatusesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range allClusterStatusesOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newAllClusterStatuses()) + }) } // start cluster-status command -var clusterStatusReq compute.ClusterStatusRequest -func init() { - Cmd.AddCommand(clusterStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var clusterStatusOverrides []func( + *cobra.Command, + *compute.ClusterStatusRequest, +) + +func newClusterStatus() *cobra.Command { + cmd := &cobra.Command{} + + var clusterStatusReq compute.ClusterStatusRequest + // TODO: short flags -} - -var clusterStatusCmd = &cobra.Command{ - Use: "cluster-status CLUSTER_ID", - Short: `Get status.`, - Long: `Get status. + cmd.Use = "cluster-status CLUSTER_ID" + cmd.Short = `Get status.` + cmd.Long = `Get status. Get the status of libraries on a cluster. A status will be available for all libraries installed on this cluster via the API or the libraries UI as well as @@ -100,57 +138,80 @@ var clusterStatusCmd = &cobra.Command{ 3. Libraries that were previously requested on this cluster or on all clusters, but now marked for removal. Within this group there is no order - guarantee.`, + guarantee.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) clusterStatusReq.ClusterId = args[0] - response, err := w.Libraries.ClusterStatus(ctx, clusterStatusReq) + response, err := w.Libraries.ClusterStatusAll(ctx, clusterStatusReq) if err != nil { return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range clusterStatusOverrides { + fn(cmd, &clusterStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newClusterStatus()) + }) } // start install command -var installReq compute.InstallLibraries -var installJson flags.JsonFlag -func init() { - Cmd.AddCommand(installCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var installOverrides []func( + *cobra.Command, + *compute.InstallLibraries, +) + +func newInstall() *cobra.Command { + cmd := &cobra.Command{} + + var installReq compute.InstallLibraries + var installJson flags.JsonFlag + // TODO: short flags - installCmd.Flags().Var(&installJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&installJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var installCmd = &cobra.Command{ - Use: "install", - Short: `Add a library.`, - Long: `Add a library. + cmd.Use = "install" + cmd.Short = `Add a library.` + cmd.Long = `Add a library. Add libraries to be installed on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. 
**Note**: The actual set of libraries to be installed on a cluster is the union of the libraries specified via this method and the libraries set to be - installed on all clusters via the libraries UI.`, + installed on all clusters via the libraries UI.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -168,35 +229,56 @@ var installCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range installOverrides { + fn(cmd, &installReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newInstall()) + }) } // start uninstall command -var uninstallReq compute.UninstallLibraries -var uninstallJson flags.JsonFlag -func init() { - Cmd.AddCommand(uninstallCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var uninstallOverrides []func( + *cobra.Command, + *compute.UninstallLibraries, +) + +func newUninstall() *cobra.Command { + cmd := &cobra.Command{} + + var uninstallReq compute.UninstallLibraries + var uninstallJson flags.JsonFlag + // TODO: short flags - uninstallCmd.Flags().Var(&uninstallJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&uninstallJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var uninstallCmd = &cobra.Command{ - Use: "uninstall", - Short: `Uninstall libraries.`, - Long: `Uninstall libraries. + cmd.Use = "uninstall" + cmd.Short = `Uninstall libraries.` + cmd.Long = `Uninstall libraries. Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the cluster is restarted. Uninstalling libraries that are - not installed on the cluster will have no impact but is not an error.`, + not installed on the cluster will have no impact but is not an error.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -214,10 +296,24 @@ var uninstallCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range uninstallOverrides { + fn(cmd, &uninstallReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUninstall()) + }) } // end service Libraries diff --git a/cmd/workspace/metastores/metastores.go b/cmd/workspace/metastores/metastores.go index e16f74ff1..274869884 100755 --- a/cmd/workspace/metastores/metastores.go +++ b/cmd/workspace/metastores/metastores.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "metastores", - Short: `A metastore is the top-level container of objects in Unity Catalog.`, - Long: `A metastore is the top-level container of objects in Unity Catalog. It stores +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "metastores", + Short: `A metastore is the top-level container of objects in Unity Catalog.`, + Long: `A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables and views) and the permissions that govern access to them. Databricks account admins can create metastores and assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to @@ -28,36 +33,53 @@ var Cmd = &cobra.Command{ workspaces created before Unity Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is available in a catalog named hive_metastore.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start assign command -var assignReq catalog.CreateMetastoreAssignment -func init() { - Cmd.AddCommand(assignCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var assignOverrides []func( + *cobra.Command, + *catalog.CreateMetastoreAssignment, +) + +func newAssign() *cobra.Command { + cmd := &cobra.Command{} + + var assignReq catalog.CreateMetastoreAssignment + // TODO: short flags -} - -var assignCmd = &cobra.Command{ - Use: "assign METASTORE_ID DEFAULT_CATALOG_NAME WORKSPACE_ID", - Short: `Create an assignment.`, - Long: `Create an assignment. + cmd.Use = "assign METASTORE_ID DEFAULT_CATALOG_NAME WORKSPACE_ID" + cmd.Short = `Create an assignment.` + cmd.Long = `Create an assignment. Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ - and __default_catalog_name__. The caller must be an account admin.`, + and __default_catalog_name__. 
The caller must be an account admin.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -73,42 +95,64 @@ var assignCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range assignOverrides { + fn(cmd, &assignReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newAssign()) + }) } // start create command -var createReq catalog.CreateMetastore -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateMetastore, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateMetastore + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Region, "region", createReq.Region, `Cloud region which the metastore serves (e.g., us-west-2, westus).`) + cmd.Flags().StringVar(&createReq.Region, "region", createReq.Region, `Cloud region which the metastore serves (e.g., us-west-2, westus).`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME STORAGE_ROOT", - Short: `Create a metastore.`, - Long: `Create a metastore. + cmd.Use = "create NAME STORAGE_ROOT" + cmd.Short = `Create a metastore.` + cmd.Long = `Create a metastore. - Creates a new metastore based on a provided name and storage root path.`, + Creates a new metastore based on a provided name and storage root path.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -127,29 +171,47 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start current command -func init() { - Cmd.AddCommand(currentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var currentOverrides []func( + *cobra.Command, +) -} +func newCurrent() *cobra.Command { + cmd := &cobra.Command{} -var currentCmd = &cobra.Command{ - Use: "current", - Short: `Get metastore assignment for workspace.`, - Long: `Get metastore assignment for workspace. + cmd.Use = "current" + cmd.Short = `Get metastore assignment for workspace.` + cmd.Long = `Get metastore assignment for workspace. - Gets the metastore assignment for the workspace being accessed.`, + Gets the metastore assignment for the workspace being accessed.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Metastores.Current(ctx) @@ -157,33 +219,54 @@ var currentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range currentOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCurrent()) + }) } // start delete command -var deleteReq catalog.DeleteMetastoreRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteMetastoreRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteMetastoreRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the metastore is not empty.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the metastore is not empty.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a metastore.`, - Long: `Delete a metastore. + cmd.Use = "delete ID" + cmd.Short = `Delete a metastore.` + cmd.Long = `Delete a metastore. - Deletes a metastore. The caller must be a metastore admin.`, + Deletes a metastore. The caller must be a metastore admin.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -211,32 +294,133 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start enable-optimization command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var enableOptimizationOverrides []func( + *cobra.Command, + *catalog.UpdatePredictiveOptimization, +) + +func newEnableOptimization() *cobra.Command { + cmd := &cobra.Command{} + + var enableOptimizationReq catalog.UpdatePredictiveOptimization + var enableOptimizationJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&enableOptimizationJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "enable-optimization METASTORE_ID ENABLE" + cmd.Short = `Toggle predictive optimization on the metastore.` + cmd.Long = `Toggle predictive optimization on the metastore. + + Enables or disables predictive optimization on the metastore.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = enableOptimizationJson.Unmarshal(&enableOptimizationReq) + if err != nil { + return err + } + } else { + enableOptimizationReq.MetastoreId = args[0] + _, err = fmt.Sscan(args[1], &enableOptimizationReq.Enable) + if err != nil { + return fmt.Errorf("invalid ENABLE: %s", args[1]) + } + } + + response, err := w.Metastores.EnableOptimization(ctx, enableOptimizationReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range enableOptimizationOverrides { + fn(cmd, &enableOptimizationReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEnableOptimization()) + }) } // start get command -var getReq catalog.GetMetastoreRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetMetastoreRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetMetastoreRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get a metastore.`, - Long: `Get a metastore. + cmd.Use = "get ID" + cmd.Short = `Get a metastore.` + cmd.Long = `Get a metastore. Gets a metastore that matches the supplied ID. 
The caller must be a metastore - admin to retrieve this info.`, + admin to retrieve this info.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -264,31 +448,49 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List metastores.`, - Long: `List metastores. + cmd.Use = "list" + cmd.Short = `List metastores.` + cmd.Long = `List metastores. Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. There is no guarantee of a - specific ordering of the elements in the array.`, + specific ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Metastores.ListAll(ctx) @@ -296,88 +498,48 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, -} + cmd.ValidArgsFunction = cobra.NoFileCompletions -// start maintenance command -var maintenanceReq catalog.UpdateAutoMaintenance -var maintenanceJson flags.JsonFlag + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} func init() { - Cmd.AddCommand(maintenanceCmd) - // TODO: short flags - maintenanceCmd.Flags().Var(&maintenanceJson, "json", `either inline JSON string or @path/to/file.json with request body`) - -} - -var maintenanceCmd = &cobra.Command{ - Use: "maintenance METASTORE_ID ENABLE", - Short: `Enables or disables auto maintenance on the metastore.`, - Long: `Enables or disables auto maintenance on the metastore. - - Enables or disables auto maintenance on the metastore.`, - - // This command is being previewed; hide from help output. 
- Hidden: true, - - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(2) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { - ctx := cmd.Context() - w := root.WorkspaceClient(ctx) - - if cmd.Flags().Changed("json") { - err = maintenanceJson.Unmarshal(&maintenanceReq) - if err != nil { - return err - } - } else { - maintenanceReq.MetastoreId = args[0] - _, err = fmt.Sscan(args[1], &maintenanceReq.Enable) - if err != nil { - return fmt.Errorf("invalid ENABLE: %s", args[1]) - } - } - - response, err := w.Metastores.Maintenance(ctx, maintenanceReq) - if err != nil { - return err - } - return cmdio.Render(ctx, response) - }, - // Disable completions since they are not applicable. - // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start summary command -func init() { - Cmd.AddCommand(summaryCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var summaryOverrides []func( + *cobra.Command, +) -} +func newSummary() *cobra.Command { + cmd := &cobra.Command{} -var summaryCmd = &cobra.Command{ - Use: "summary", - Short: `Get a metastore summary.`, - Long: `Get a metastore summary. + cmd.Use = "summary" + cmd.Short = `Get a metastore summary.` + cmd.Long = `Get a metastore summary. Gets information about a metastore. This summary includes the storage - credential, the cloud vendor, the cloud region, and the global metastore ID.`, + credential, the cloud vendor, the cloud region, and the global metastore ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Metastores.Summary(ctx) @@ -385,35 +547,57 @@ var summaryCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range summaryOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSummary()) + }) } // start unassign command -var unassignReq catalog.UnassignRequest -func init() { - Cmd.AddCommand(unassignCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var unassignOverrides []func( + *cobra.Command, + *catalog.UnassignRequest, +) + +func newUnassign() *cobra.Command { + cmd := &cobra.Command{} + + var unassignReq catalog.UnassignRequest + // TODO: short flags -} - -var unassignCmd = &cobra.Command{ - Use: "unassign WORKSPACE_ID METASTORE_ID", - Short: `Delete an assignment.`, - Long: `Delete an assignment. 
+ cmd.Use = "unassign WORKSPACE_ID METASTORE_ID" + cmd.Short = `Delete an assignment.` + cmd.Long = `Delete an assignment. - Deletes a metastore assignment. The caller must be an account administrator.`, + Deletes a metastore assignment. The caller must be an account administrator.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -428,40 +612,61 @@ var unassignCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range unassignOverrides { + fn(cmd, &unassignReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUnassign()) + }) } // start update command -var updateReq catalog.UpdateMetastore -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateMetastore, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateMetastore + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.DeltaSharingOrganizationName, "delta-sharing-organization-name", updateReq.DeltaSharingOrganizationName, `The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.`) - updateCmd.Flags().Int64Var(&updateReq.DeltaSharingRecipientTokenLifetimeInSeconds, "delta-sharing-recipient-token-lifetime-in-seconds", updateReq.DeltaSharingRecipientTokenLifetimeInSeconds, `The lifetime of delta sharing recipient token in seconds.`) - updateCmd.Flags().Var(&updateReq.DeltaSharingScope, "delta-sharing-scope", `The scope of Delta Sharing enabled for the metastore.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The user-specified name of the metastore.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the metastore.`) - updateCmd.Flags().StringVar(&updateReq.PrivilegeModelVersion, "privilege-model-version", updateReq.PrivilegeModelVersion, `Privilege model version of the metastore, of the form major.minor (e.g., 1.0).`) - updateCmd.Flags().StringVar(&updateReq.StorageRootCredentialId, "storage-root-credential-id", updateReq.StorageRootCredentialId, `UUID of storage credential to access the metastore storage_root.`) + cmd.Flags().StringVar(&updateReq.DeltaSharingOrganizationName, "delta-sharing-organization-name", updateReq.DeltaSharingOrganizationName, `The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.`) + cmd.Flags().Int64Var(&updateReq.DeltaSharingRecipientTokenLifetimeInSeconds, "delta-sharing-recipient-token-lifetime-in-seconds", 
updateReq.DeltaSharingRecipientTokenLifetimeInSeconds, `The lifetime of delta sharing recipient token in seconds.`) + cmd.Flags().Var(&updateReq.DeltaSharingScope, "delta-sharing-scope", `The scope of Delta Sharing enabled for the metastore.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The user-specified name of the metastore.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the metastore.`) + cmd.Flags().StringVar(&updateReq.PrivilegeModelVersion, "privilege-model-version", updateReq.PrivilegeModelVersion, `Privilege model version of the metastore, of the form major.minor (e.g., 1.0).`) + cmd.Flags().StringVar(&updateReq.StorageRootCredentialId, "storage-root-credential-id", updateReq.StorageRootCredentialId, `UUID of storage credential to access the metastore storage_root.`) -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Update a metastore.`, - Long: `Update a metastore. + cmd.Use = "update ID" + cmd.Short = `Update a metastore.` + cmd.Long = `Update a metastore. Updates information for a specific metastore. The caller must be a metastore - admin.`, + admin.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -489,37 +694,58 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // start update-assignment command -var updateAssignmentReq catalog.UpdateMetastoreAssignment -func init() { - Cmd.AddCommand(updateAssignmentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateAssignmentOverrides []func( + *cobra.Command, + *catalog.UpdateMetastoreAssignment, +) + +func newUpdateAssignment() *cobra.Command { + cmd := &cobra.Command{} + + var updateAssignmentReq catalog.UpdateMetastoreAssignment + // TODO: short flags - updateAssignmentCmd.Flags().StringVar(&updateAssignmentReq.DefaultCatalogName, "default-catalog-name", updateAssignmentReq.DefaultCatalogName, `The name of the default catalog for the metastore.`) - updateAssignmentCmd.Flags().StringVar(&updateAssignmentReq.MetastoreId, "metastore-id", updateAssignmentReq.MetastoreId, `The unique ID of the metastore.`) + cmd.Flags().StringVar(&updateAssignmentReq.DefaultCatalogName, "default-catalog-name", updateAssignmentReq.DefaultCatalogName, `The name of the default catalog for the metastore.`) + cmd.Flags().StringVar(&updateAssignmentReq.MetastoreId, "metastore-id", updateAssignmentReq.MetastoreId, `The unique ID of the metastore.`) -} - -var updateAssignmentCmd = &cobra.Command{ - Use: "update-assignment WORKSPACE_ID", - Short: `Update an assignment.`, - Long: `Update an assignment. 
+ cmd.Use = "update-assignment WORKSPACE_ID" + cmd.Short = `Update an assignment.` + cmd.Long = `Update an assignment. Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. The caller must be an account admin - to update __metastore_id__; otherwise, the caller can be a Workspace admin.`, + to update __metastore_id__; otherwise, the caller can be a Workspace admin.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -550,10 +776,24 @@ var updateAssignmentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateAssignmentOverrides { + fn(cmd, &updateAssignmentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateAssignment()) + }) } // end service Metastores diff --git a/cmd/workspace/metastores/overrides.go b/cmd/workspace/metastores/overrides.go index 9d1c23ac2..2c9ca6f79 100644 --- a/cmd/workspace/metastores/overrides.go +++ b/cmd/workspace/metastores/overrides.go @@ -1,10 +1,17 @@ package metastores -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{"Region"}} {{range .}}{{.MetastoreId|green}} {{.Name|cyan}} {{.Region}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/model-registry/model-registry.go b/cmd/workspace/model-registry/model-registry.go index ce7f4806c..e2e552255 100755 --- a/cmd/workspace/model-registry/model-registry.go +++ b/cmd/workspace/model-registry/model-registry.go @@ -12,46 +12,68 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "model-registry", - Short: `MLflow Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.`, - Long: `MLflow Model Registry is a centralized model repository and a UI and set of +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "model-registry", + Short: `MLflow Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.`, + Long: `MLflow Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.`, - Annotations: map[string]string{ - "package": "ml", - }, + GroupID: "ml", + Annotations: map[string]string{ + "package": "ml", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start approve-transition-request command -var approveTransitionRequestReq ml.ApproveTransitionRequest -var approveTransitionRequestJson flags.JsonFlag -func init() { - Cmd.AddCommand(approveTransitionRequestCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var approveTransitionRequestOverrides []func( + *cobra.Command, + *ml.ApproveTransitionRequest, +) + +func newApproveTransitionRequest() *cobra.Command { + cmd := &cobra.Command{} + + var approveTransitionRequestReq ml.ApproveTransitionRequest + var approveTransitionRequestJson flags.JsonFlag + // TODO: short flags - approveTransitionRequestCmd.Flags().Var(&approveTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&approveTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) - approveTransitionRequestCmd.Flags().StringVar(&approveTransitionRequestReq.Comment, "comment", approveTransitionRequestReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&approveTransitionRequestReq.Comment, "comment", approveTransitionRequestReq.Comment, `User-provided comment on the action.`) -} - -var approveTransitionRequestCmd = &cobra.Command{ - Use: "approve-transition-request NAME VERSION STAGE ARCHIVE_EXISTING_VERSIONS", - Short: `Approve transition request.`, - Long: `Approve transition request. + cmd.Use = "approve-transition-request NAME VERSION STAGE ARCHIVE_EXISTING_VERSIONS" + cmd.Short = `Approve transition request.` + cmd.Long = `Approve transition request. - Approves a model version stage transition request.`, + Approves a model version stage transition request.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -78,42 +100,64 @@ var approveTransitionRequestCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range approveTransitionRequestOverrides { + fn(cmd, &approveTransitionRequestReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newApproveTransitionRequest()) + }) } // start create-comment command -var createCommentReq ml.CreateComment -var createCommentJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCommentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createCommentOverrides []func( + *cobra.Command, + *ml.CreateComment, +) + +func newCreateComment() *cobra.Command { + cmd := &cobra.Command{} + + var createCommentReq ml.CreateComment + var createCommentJson flags.JsonFlag + // TODO: short flags - createCommentCmd.Flags().Var(&createCommentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createCommentJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCommentCmd = &cobra.Command{ - Use: "create-comment NAME VERSION COMMENT", - Short: `Post a comment.`, - Long: `Post a comment. + cmd.Use = "create-comment NAME VERSION COMMENT" + cmd.Short = `Post a comment.` + cmd.Long = `Post a comment. Posts a comment on a model version. A comment can be submitted either by a user or programmatically to display relevant information about the model. For - example, test results or deployment errors.`, + example, test results or deployment errors.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -133,46 +177,68 @@ var createCommentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createCommentOverrides { + fn(cmd, &createCommentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateComment()) + }) } // start create-model command -var createModelReq ml.CreateModelRequest -var createModelJson flags.JsonFlag -func init() { - Cmd.AddCommand(createModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createModelOverrides []func( + *cobra.Command, + *ml.CreateModelRequest, +) + +func newCreateModel() *cobra.Command { + cmd := &cobra.Command{} + + var createModelReq ml.CreateModelRequest + var createModelJson flags.JsonFlag + // TODO: short flags - createModelCmd.Flags().Var(&createModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createModelCmd.Flags().StringVar(&createModelReq.Description, "description", createModelReq.Description, `Optional description for registered model.`) + cmd.Flags().StringVar(&createModelReq.Description, "description", createModelReq.Description, `Optional description for registered model.`) // TODO: array: tags -} - -var createModelCmd = &cobra.Command{ - Use: "create-model NAME", - Short: `Create a model.`, - Long: `Create a model. + cmd.Use = "create-model NAME" + cmd.Short = `Create a model.` + cmd.Long = `Create a model. 
Creates a new registered model with the name specified in the request body. Throws RESOURCE_ALREADY_EXISTS if a registered model with the given name - exists.`, + exists.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -190,45 +256,67 @@ var createModelCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createModelOverrides { + fn(cmd, &createModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateModel()) + }) } // start create-model-version command -var createModelVersionReq ml.CreateModelVersionRequest -var createModelVersionJson flags.JsonFlag -func init() { - Cmd.AddCommand(createModelVersionCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createModelVersionOverrides []func( + *cobra.Command, + *ml.CreateModelVersionRequest, +) + +func newCreateModelVersion() *cobra.Command { + cmd := &cobra.Command{} + + var createModelVersionReq ml.CreateModelVersionRequest + var createModelVersionJson flags.JsonFlag + // TODO: short flags - createModelVersionCmd.Flags().Var(&createModelVersionJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createModelVersionJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createModelVersionCmd.Flags().StringVar(&createModelVersionReq.Description, "description", createModelVersionReq.Description, `Optional description for model version.`) - createModelVersionCmd.Flags().StringVar(&createModelVersionReq.RunId, "run-id", createModelVersionReq.RunId, `MLflow run ID for correlation, if source was generated by an experiment run in MLflow tracking server.`) - createModelVersionCmd.Flags().StringVar(&createModelVersionReq.RunLink, "run-link", createModelVersionReq.RunLink, `MLflow run link - this is the exact link of the run that generated this model version, potentially hosted at another instance of MLflow.`) + cmd.Flags().StringVar(&createModelVersionReq.Description, "description", createModelVersionReq.Description, `Optional description for model version.`) + cmd.Flags().StringVar(&createModelVersionReq.RunId, "run-id", createModelVersionReq.RunId, `MLflow run ID for correlation, if source was generated by an experiment run in MLflow tracking server.`) + cmd.Flags().StringVar(&createModelVersionReq.RunLink, "run-link", createModelVersionReq.RunLink, `MLflow run link - this is the exact link of the run that generated this model version, potentially hosted at another instance of MLflow.`) // TODO: array: tags -} - -var createModelVersionCmd = 
&cobra.Command{ - Use: "create-model-version NAME SOURCE", - Short: `Create a model version.`, - Long: `Create a model version. + cmd.Use = "create-model-version NAME SOURCE" + cmd.Short = `Create a model version.` + cmd.Long = `Create a model version. - Creates a model version.`, + Creates a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -247,42 +335,64 @@ var createModelVersionCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createModelVersionOverrides { + fn(cmd, &createModelVersionReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateModelVersion()) + }) } // start create-transition-request command -var createTransitionRequestReq ml.CreateTransitionRequest -var createTransitionRequestJson flags.JsonFlag -func init() { - Cmd.AddCommand(createTransitionRequestCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createTransitionRequestOverrides []func( + *cobra.Command, + *ml.CreateTransitionRequest, +) + +func newCreateTransitionRequest() *cobra.Command { + cmd := &cobra.Command{} + + var createTransitionRequestReq ml.CreateTransitionRequest + var createTransitionRequestJson flags.JsonFlag + // TODO: short flags - createTransitionRequestCmd.Flags().Var(&createTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createTransitionRequestCmd.Flags().StringVar(&createTransitionRequestReq.Comment, "comment", createTransitionRequestReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&createTransitionRequestReq.Comment, "comment", createTransitionRequestReq.Comment, `User-provided comment on the action.`) -} - -var createTransitionRequestCmd = &cobra.Command{ - Use: "create-transition-request NAME VERSION STAGE", - Short: `Make a transition request.`, - Long: `Make a transition request. + cmd.Use = "create-transition-request NAME VERSION STAGE" + cmd.Short = `Make a transition request.` + cmd.Long = `Make a transition request. 
- Creates a model version stage transition request.`, + Creates a model version stage transition request.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -305,41 +415,62 @@ var createTransitionRequestCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createTransitionRequestOverrides { + fn(cmd, &createTransitionRequestReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateTransitionRequest()) + }) } // start create-webhook command -var createWebhookReq ml.CreateRegistryWebhook -var createWebhookJson flags.JsonFlag -func init() { - Cmd.AddCommand(createWebhookCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createWebhookOverrides []func( + *cobra.Command, + *ml.CreateRegistryWebhook, +) + +func newCreateWebhook() *cobra.Command { + cmd := &cobra.Command{} + + var createWebhookReq ml.CreateRegistryWebhook + var createWebhookJson flags.JsonFlag + // TODO: short flags - createWebhookCmd.Flags().Var(&createWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createWebhookCmd.Flags().StringVar(&createWebhookReq.Description, "description", createWebhookReq.Description, `User-specified description for the webhook.`) + cmd.Flags().StringVar(&createWebhookReq.Description, "description", createWebhookReq.Description, `User-specified description for the webhook.`) // TODO: complex arg: http_url_spec // TODO: complex arg: job_spec - createWebhookCmd.Flags().StringVar(&createWebhookReq.ModelName, "model-name", createWebhookReq.ModelName, `Name of the model whose events would trigger this webhook.`) - createWebhookCmd.Flags().Var(&createWebhookReq.Status, "status", `This describes an enum.`) + cmd.Flags().StringVar(&createWebhookReq.ModelName, "model-name", createWebhookReq.ModelName, `Name of the model whose events would trigger this webhook.`) + cmd.Flags().Var(&createWebhookReq.Status, "status", `This describes an enum.`) -} - -var createWebhookCmd = &cobra.Command{ - Use: "create-webhook", - Short: `Create a webhook.`, - Long: `Create a webhook. + cmd.Use = "create-webhook" + cmd.Short = `Create a webhook.` + cmd.Long = `Create a webhook. **NOTE**: This endpoint is in Public Preview. 
- Creates a registry webhook.`, + Creates a registry webhook.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -357,35 +488,57 @@ var createWebhookCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createWebhookOverrides { + fn(cmd, &createWebhookReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateWebhook()) + }) } // start delete-comment command -var deleteCommentReq ml.DeleteCommentRequest -func init() { - Cmd.AddCommand(deleteCommentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteCommentOverrides []func( + *cobra.Command, + *ml.DeleteCommentRequest, +) + +func newDeleteComment() *cobra.Command { + cmd := &cobra.Command{} + + var deleteCommentReq ml.DeleteCommentRequest + // TODO: short flags -} - -var deleteCommentCmd = &cobra.Command{ - Use: "delete-comment ID", - Short: `Delete a comment.`, - Long: `Delete a comment. + cmd.Use = "delete-comment ID" + cmd.Short = `Delete a comment.` + cmd.Long = `Delete a comment. - Deletes a comment on a model version.`, + Deletes a comment on a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -396,35 +549,57 @@ var deleteCommentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteCommentOverrides { + fn(cmd, &deleteCommentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteComment()) + }) } // start delete-model command -var deleteModelReq ml.DeleteModelRequest -func init() { - Cmd.AddCommand(deleteModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteModelOverrides []func( + *cobra.Command, + *ml.DeleteModelRequest, +) + +func newDeleteModel() *cobra.Command { + cmd := &cobra.Command{} + + var deleteModelReq ml.DeleteModelRequest + // TODO: short flags -} - -var deleteModelCmd = &cobra.Command{ - Use: "delete-model NAME", - Short: `Delete a model.`, - Long: `Delete a model. + cmd.Use = "delete-model NAME" + cmd.Short = `Delete a model.` + cmd.Long = `Delete a model. - Deletes a registered model.`, + Deletes a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -435,35 +610,57 @@ var deleteModelCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteModelOverrides { + fn(cmd, &deleteModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteModel()) + }) } // start delete-model-tag command -var deleteModelTagReq ml.DeleteModelTagRequest -func init() { - Cmd.AddCommand(deleteModelTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteModelTagOverrides []func( + *cobra.Command, + *ml.DeleteModelTagRequest, +) + +func newDeleteModelTag() *cobra.Command { + cmd := &cobra.Command{} + + var deleteModelTagReq ml.DeleteModelTagRequest + // TODO: short flags -} - -var deleteModelTagCmd = &cobra.Command{ - Use: "delete-model-tag NAME KEY", - Short: `Delete a model tag.`, - Long: `Delete a model tag. + cmd.Use = "delete-model-tag NAME KEY" + cmd.Short = `Delete a model tag.` + cmd.Long = `Delete a model tag. - Deletes the tag for a registered model.`, + Deletes the tag for a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -475,35 +672,57 @@ var deleteModelTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteModelTagOverrides { + fn(cmd, &deleteModelTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteModelTag()) + }) } // start delete-model-version command -var deleteModelVersionReq ml.DeleteModelVersionRequest -func init() { - Cmd.AddCommand(deleteModelVersionCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteModelVersionOverrides []func( + *cobra.Command, + *ml.DeleteModelVersionRequest, +) + +func newDeleteModelVersion() *cobra.Command { + cmd := &cobra.Command{} + + var deleteModelVersionReq ml.DeleteModelVersionRequest + // TODO: short flags -} - -var deleteModelVersionCmd = &cobra.Command{ - Use: "delete-model-version NAME VERSION", - Short: `Delete a model version.`, - Long: `Delete a model version. + cmd.Use = "delete-model-version NAME VERSION" + cmd.Short = `Delete a model version.` + cmd.Long = `Delete a model version. - Deletes a model version.`, + Deletes a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -515,35 +734,57 @@ var deleteModelVersionCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteModelVersionOverrides { + fn(cmd, &deleteModelVersionReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteModelVersion()) + }) } // start delete-model-version-tag command -var deleteModelVersionTagReq ml.DeleteModelVersionTagRequest -func init() { - Cmd.AddCommand(deleteModelVersionTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteModelVersionTagOverrides []func( + *cobra.Command, + *ml.DeleteModelVersionTagRequest, +) + +func newDeleteModelVersionTag() *cobra.Command { + cmd := &cobra.Command{} + + var deleteModelVersionTagReq ml.DeleteModelVersionTagRequest + // TODO: short flags -} - -var deleteModelVersionTagCmd = &cobra.Command{ - Use: "delete-model-version-tag NAME VERSION KEY", - Short: `Delete a model version tag.`, - Long: `Delete a model version tag. + cmd.Use = "delete-model-version-tag NAME VERSION KEY" + cmd.Short = `Delete a model version tag.` + cmd.Long = `Delete a model version tag. 
- Deletes a model version tag.`, + Deletes a model version tag.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -556,37 +797,59 @@ var deleteModelVersionTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteModelVersionTagOverrides { + fn(cmd, &deleteModelVersionTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteModelVersionTag()) + }) } // start delete-transition-request command -var deleteTransitionRequestReq ml.DeleteTransitionRequestRequest -func init() { - Cmd.AddCommand(deleteTransitionRequestCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteTransitionRequestOverrides []func( + *cobra.Command, + *ml.DeleteTransitionRequestRequest, +) + +func newDeleteTransitionRequest() *cobra.Command { + cmd := &cobra.Command{} + + var deleteTransitionRequestReq ml.DeleteTransitionRequestRequest + // TODO: short flags - deleteTransitionRequestCmd.Flags().StringVar(&deleteTransitionRequestReq.Comment, "comment", deleteTransitionRequestReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&deleteTransitionRequestReq.Comment, "comment", deleteTransitionRequestReq.Comment, `User-provided comment on the action.`) -} - -var deleteTransitionRequestCmd = &cobra.Command{ - Use: "delete-transition-request NAME VERSION STAGE CREATOR", - Short: `Delete a transition request.`, - Long: `Delete a transition request. + cmd.Use = "delete-transition-request NAME VERSION STAGE CREATOR" + cmd.Short = `Delete a transition request.` + cmd.Long = `Delete a transition request. - Cancels a model version stage transition request.`, + Cancels a model version stage transition request.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -603,44 +866,66 @@ var deleteTransitionRequestCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteTransitionRequestOverrides { + fn(cmd, &deleteTransitionRequestReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteTransitionRequest()) + }) } // start delete-webhook command -var deleteWebhookReq ml.DeleteWebhookRequest -var deleteWebhookJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteWebhookCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteWebhookOverrides []func( + *cobra.Command, + *ml.DeleteWebhookRequest, +) + +func newDeleteWebhook() *cobra.Command { + cmd := &cobra.Command{} + + var deleteWebhookReq ml.DeleteWebhookRequest + var deleteWebhookJson flags.JsonFlag + // TODO: short flags - deleteWebhookCmd.Flags().Var(&deleteWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) - deleteWebhookCmd.Flags().StringVar(&deleteWebhookReq.Id, "id", deleteWebhookReq.Id, `Webhook ID required to delete a registry webhook.`) + cmd.Flags().StringVar(&deleteWebhookReq.Id, "id", deleteWebhookReq.Id, `Webhook ID required to delete a registry webhook.`) -} - -var deleteWebhookCmd = &cobra.Command{ - Use: "delete-webhook", - Short: `Delete a webhook.`, - Long: `Delete a webhook. + cmd.Use = "delete-webhook" + cmd.Short = `Delete a webhook.` + cmd.Long = `Delete a webhook. **NOTE:** This endpoint is in Public Preview. - Deletes a registry webhook.`, + Deletes a registry webhook.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -657,42 +942,64 @@ var deleteWebhookCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteWebhookOverrides { + fn(cmd, &deleteWebhookReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteWebhook()) + }) } // start get-latest-versions command -var getLatestVersionsReq ml.GetLatestVersionsRequest -var getLatestVersionsJson flags.JsonFlag -func init() { - Cmd.AddCommand(getLatestVersionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getLatestVersionsOverrides []func( + *cobra.Command, + *ml.GetLatestVersionsRequest, +) + +func newGetLatestVersions() *cobra.Command { + cmd := &cobra.Command{} + + var getLatestVersionsReq ml.GetLatestVersionsRequest + var getLatestVersionsJson flags.JsonFlag + // TODO: short flags - getLatestVersionsCmd.Flags().Var(&getLatestVersionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&getLatestVersionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: stages -} - -var getLatestVersionsCmd = &cobra.Command{ - Use: "get-latest-versions NAME", - Short: `Get the latest version.`, - Long: `Get the latest version. + cmd.Use = "get-latest-versions NAME" + cmd.Short = `Get the latest version.` + cmd.Long = `Get the latest version. - Gets the latest version of a registered model.`, + Gets the latest version of a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -710,39 +1017,61 @@ var getLatestVersionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getLatestVersionsOverrides { + fn(cmd, &getLatestVersionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetLatestVersions()) + }) } // start get-model command -var getModelReq ml.GetModelRequest -func init() { - Cmd.AddCommand(getModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getModelOverrides []func( + *cobra.Command, + *ml.GetModelRequest, +) + +func newGetModel() *cobra.Command { + cmd := &cobra.Command{} + + var getModelReq ml.GetModelRequest + // TODO: short flags -} - -var getModelCmd = &cobra.Command{ - Use: "get-model NAME", - Short: `Get model.`, - Long: `Get model. + cmd.Use = "get-model NAME" + cmd.Short = `Get model.` + cmd.Long = `Get model. Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also returns the model's Databricks workspace ID and the permission level of the requesting user on the model. 
- [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel`, + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -753,35 +1082,57 @@ var getModelCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getModelOverrides { + fn(cmd, &getModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetModel()) + }) } // start get-model-version command -var getModelVersionReq ml.GetModelVersionRequest -func init() { - Cmd.AddCommand(getModelVersionCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getModelVersionOverrides []func( + *cobra.Command, + *ml.GetModelVersionRequest, +) + +func newGetModelVersion() *cobra.Command { + cmd := &cobra.Command{} + + var getModelVersionReq ml.GetModelVersionRequest + // TODO: short flags -} - -var getModelVersionCmd = &cobra.Command{ - Use: "get-model-version NAME VERSION", - Short: `Get a model version.`, - Long: `Get a model version. + cmd.Use = "get-model-version NAME VERSION" + cmd.Short = `Get a model version.` + cmd.Long = `Get a model version. - Get a model version.`, + Get a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -793,35 +1144,57 @@ var getModelVersionCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getModelVersionOverrides { + fn(cmd, &getModelVersionReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetModelVersion()) + }) } // start get-model-version-download-uri command -var getModelVersionDownloadUriReq ml.GetModelVersionDownloadUriRequest -func init() { - Cmd.AddCommand(getModelVersionDownloadUriCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getModelVersionDownloadUriOverrides []func( + *cobra.Command, + *ml.GetModelVersionDownloadUriRequest, +) + +func newGetModelVersionDownloadUri() *cobra.Command { + cmd := &cobra.Command{} + + var getModelVersionDownloadUriReq ml.GetModelVersionDownloadUriRequest + // TODO: short flags -} - -var getModelVersionDownloadUriCmd = &cobra.Command{ - Use: "get-model-version-download-uri NAME VERSION", - Short: `Get a model version URI.`, - Long: `Get a model version URI. + cmd.Use = "get-model-version-download-uri NAME VERSION" + cmd.Short = `Get a model version URI.` + cmd.Long = `Get a model version URI. - Gets a URI to download the model version.`, + Gets a URI to download the model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -833,44 +1206,189 @@ var getModelVersionDownloadUriCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getModelVersionDownloadUriOverrides { + fn(cmd, &getModelVersionDownloadUriReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetModelVersionDownloadUri()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *ml.GetRegisteredModelPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq ml.GetRegisteredModelPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels REGISTERED_MODEL_ID" + cmd.Short = `Get registered model permission levels.` + cmd.Long = `Get registered model permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionLevelsReq.RegisteredModelId = args[0] + + response, err := w.ModelRegistry.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *ml.GetRegisteredModelPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq ml.GetRegisteredModelPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions REGISTERED_MODEL_ID" + cmd.Short = `Get registered model permissions.` + cmd.Long = `Get registered model permissions. + + Gets the permissions of a registered model. Registered models can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionsReq.RegisteredModelId = args[0] + + response, err := w.ModelRegistry.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start list-models command -var listModelsReq ml.ListModelsRequest -var listModelsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listModelsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listModelsOverrides []func( + *cobra.Command, + *ml.ListModelsRequest, +) + +func newListModels() *cobra.Command { + cmd := &cobra.Command{} + + var listModelsReq ml.ListModelsRequest + var listModelsJson flags.JsonFlag + // TODO: short flags - listModelsCmd.Flags().Var(&listModelsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listModelsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listModelsCmd.Flags().IntVar(&listModelsReq.MaxResults, "max-results", listModelsReq.MaxResults, `Maximum number of registered models desired.`) - listModelsCmd.Flags().StringVar(&listModelsReq.PageToken, "page-token", listModelsReq.PageToken, `Pagination token to go to the next page based on a previous query.`) + cmd.Flags().IntVar(&listModelsReq.MaxResults, "max-results", listModelsReq.MaxResults, `Maximum number of registered models desired.`) + cmd.Flags().StringVar(&listModelsReq.PageToken, "page-token", listModelsReq.PageToken, `Pagination token to go to the next page based on a previous query.`) -} - -var listModelsCmd = &cobra.Command{ - Use: "list-models", - Short: `List models.`, - Long: `List models. 
+ cmd.Use = "list-models" + cmd.Short = `List models.` + cmd.Long = `List models. Lists all available registered models, up to the limit specified in - __max_results__.`, + __max_results__.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -887,35 +1405,57 @@ var listModelsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listModelsOverrides { + fn(cmd, &listModelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListModels()) + }) } // start list-transition-requests command -var listTransitionRequestsReq ml.ListTransitionRequestsRequest -func init() { - Cmd.AddCommand(listTransitionRequestsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listTransitionRequestsOverrides []func( + *cobra.Command, + *ml.ListTransitionRequestsRequest, +) + +func newListTransitionRequests() *cobra.Command { + cmd := &cobra.Command{} + + var listTransitionRequestsReq ml.ListTransitionRequestsRequest + // TODO: short flags -} - -var listTransitionRequestsCmd = &cobra.Command{ - Use: "list-transition-requests NAME VERSION", - Short: `List transition requests.`, - Long: `List transition requests. + cmd.Use = "list-transition-requests NAME VERSION" + cmd.Short = `List transition requests.` + cmd.Long = `List transition requests. - Gets a list of all open stage transition requests for the model version.`, + Gets a list of all open stage transition requests for the model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -927,46 +1467,68 @@ var listTransitionRequestsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listTransitionRequestsOverrides { + fn(cmd, &listTransitionRequestsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListTransitionRequests()) + }) } // start list-webhooks command -var listWebhooksReq ml.ListWebhooksRequest -var listWebhooksJson flags.JsonFlag -func init() { - Cmd.AddCommand(listWebhooksCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listWebhooksOverrides []func( + *cobra.Command, + *ml.ListWebhooksRequest, +) + +func newListWebhooks() *cobra.Command { + cmd := &cobra.Command{} + + var listWebhooksReq ml.ListWebhooksRequest + var listWebhooksJson flags.JsonFlag + // TODO: short flags - listWebhooksCmd.Flags().Var(&listWebhooksJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listWebhooksJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: events - listWebhooksCmd.Flags().StringVar(&listWebhooksReq.ModelName, "model-name", listWebhooksReq.ModelName, `If not specified, all webhooks associated with the specified events are listed, regardless of their associated model.`) - listWebhooksCmd.Flags().StringVar(&listWebhooksReq.PageToken, "page-token", listWebhooksReq.PageToken, `Token indicating the page of artifact results to fetch.`) + cmd.Flags().StringVar(&listWebhooksReq.ModelName, "model-name", listWebhooksReq.ModelName, `If not specified, all webhooks associated with the specified events are listed, regardless of their associated model.`) + cmd.Flags().StringVar(&listWebhooksReq.PageToken, "page-token", listWebhooksReq.PageToken, `Token indicating the page of artifact results to fetch.`) -} - -var listWebhooksCmd = &cobra.Command{ - Use: "list-webhooks", - Short: `List registry webhooks.`, - Long: `List registry webhooks. + cmd.Use = "list-webhooks" + cmd.Short = `List registry webhooks.` + cmd.Long = `List registry webhooks. **NOTE:** This endpoint is in Public Preview. - Lists all registry webhooks.`, + Lists all registry webhooks.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -983,42 +1545,64 @@ var listWebhooksCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listWebhooksOverrides { + fn(cmd, &listWebhooksReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListWebhooks()) + }) } // start reject-transition-request command -var rejectTransitionRequestReq ml.RejectTransitionRequest -var rejectTransitionRequestJson flags.JsonFlag -func init() { - Cmd.AddCommand(rejectTransitionRequestCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var rejectTransitionRequestOverrides []func( + *cobra.Command, + *ml.RejectTransitionRequest, +) + +func newRejectTransitionRequest() *cobra.Command { + cmd := &cobra.Command{} + + var rejectTransitionRequestReq ml.RejectTransitionRequest + var rejectTransitionRequestJson flags.JsonFlag + // TODO: short flags - rejectTransitionRequestCmd.Flags().Var(&rejectTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&rejectTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) - rejectTransitionRequestCmd.Flags().StringVar(&rejectTransitionRequestReq.Comment, "comment", rejectTransitionRequestReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&rejectTransitionRequestReq.Comment, "comment", rejectTransitionRequestReq.Comment, `User-provided comment on the action.`) -} - -var rejectTransitionRequestCmd = &cobra.Command{ - Use: "reject-transition-request NAME VERSION STAGE", - Short: `Reject a transition request.`, - Long: `Reject a transition request. + cmd.Use = "reject-transition-request NAME VERSION STAGE" + cmd.Short = `Reject a transition request.` + cmd.Long = `Reject a transition request. - Rejects a model version stage transition request.`, + Rejects a model version stage transition request.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1041,42 +1625,64 @@ var rejectTransitionRequestCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range rejectTransitionRequestOverrides { + fn(cmd, &rejectTransitionRequestReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRejectTransitionRequest()) + }) } // start rename-model command -var renameModelReq ml.RenameModelRequest -var renameModelJson flags.JsonFlag -func init() { - Cmd.AddCommand(renameModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var renameModelOverrides []func( + *cobra.Command, + *ml.RenameModelRequest, +) + +func newRenameModel() *cobra.Command { + cmd := &cobra.Command{} + + var renameModelReq ml.RenameModelRequest + var renameModelJson flags.JsonFlag + // TODO: short flags - renameModelCmd.Flags().Var(&renameModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&renameModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) - renameModelCmd.Flags().StringVar(&renameModelReq.NewName, "new-name", renameModelReq.NewName, `If provided, updates the name for this registered_model.`) + cmd.Flags().StringVar(&renameModelReq.NewName, "new-name", renameModelReq.NewName, `If provided, updates the name for this registered_model.`) -} - -var renameModelCmd = &cobra.Command{ - Use: "rename-model NAME", - Short: `Rename a model.`, - Long: `Rename a model. + cmd.Use = "rename-model NAME" + cmd.Short = `Rename a model.` + cmd.Long = `Rename a model. - Renames a registered model.`, + Renames a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1094,45 +1700,67 @@ var renameModelCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range renameModelOverrides { + fn(cmd, &renameModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRenameModel()) + }) } // start search-model-versions command -var searchModelVersionsReq ml.SearchModelVersionsRequest -var searchModelVersionsJson flags.JsonFlag -func init() { - Cmd.AddCommand(searchModelVersionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var searchModelVersionsOverrides []func( + *cobra.Command, + *ml.SearchModelVersionsRequest, +) + +func newSearchModelVersions() *cobra.Command { + cmd := &cobra.Command{} + + var searchModelVersionsReq ml.SearchModelVersionsRequest + var searchModelVersionsJson flags.JsonFlag + // TODO: short flags - searchModelVersionsCmd.Flags().Var(&searchModelVersionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&searchModelVersionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - searchModelVersionsCmd.Flags().StringVar(&searchModelVersionsReq.Filter, "filter", searchModelVersionsReq.Filter, `String filter condition, like "name='my-model-name'".`) - searchModelVersionsCmd.Flags().IntVar(&searchModelVersionsReq.MaxResults, "max-results", searchModelVersionsReq.MaxResults, `Maximum number of models desired.`) + cmd.Flags().StringVar(&searchModelVersionsReq.Filter, "filter", searchModelVersionsReq.Filter, `String filter condition, like "name='my-model-name'".`) + cmd.Flags().IntVar(&searchModelVersionsReq.MaxResults, "max-results", searchModelVersionsReq.MaxResults, `Maximum number of models desired.`) // TODO: array: order_by - searchModelVersionsCmd.Flags().StringVar(&searchModelVersionsReq.PageToken, "page-token", searchModelVersionsReq.PageToken, `Pagination token to go to next page based on previous search query.`) + cmd.Flags().StringVar(&searchModelVersionsReq.PageToken, "page-token", searchModelVersionsReq.PageToken, `Pagination token to go to next page based on previous search query.`) -} - -var searchModelVersionsCmd = &cobra.Command{ - Use: "search-model-versions", - Short: `Searches model versions.`, - Long: `Searches model versions. + cmd.Use = "search-model-versions" + cmd.Short = `Searches model versions.` + cmd.Long = `Searches model versions. - Searches for specific model versions based on the supplied __filter__.`, + Searches for specific model versions based on the supplied __filter__.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1149,45 +1777,67 @@ var searchModelVersionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range searchModelVersionsOverrides { + fn(cmd, &searchModelVersionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSearchModelVersions()) + }) } // start search-models command -var searchModelsReq ml.SearchModelsRequest -var searchModelsJson flags.JsonFlag -func init() { - Cmd.AddCommand(searchModelsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var searchModelsOverrides []func( + *cobra.Command, + *ml.SearchModelsRequest, +) + +func newSearchModels() *cobra.Command { + cmd := &cobra.Command{} + + var searchModelsReq ml.SearchModelsRequest + var searchModelsJson flags.JsonFlag + // TODO: short flags - searchModelsCmd.Flags().Var(&searchModelsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&searchModelsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - searchModelsCmd.Flags().StringVar(&searchModelsReq.Filter, "filter", searchModelsReq.Filter, `String filter condition, like "name LIKE 'my-model-name'".`) - searchModelsCmd.Flags().IntVar(&searchModelsReq.MaxResults, "max-results", searchModelsReq.MaxResults, `Maximum number of models desired.`) + cmd.Flags().StringVar(&searchModelsReq.Filter, "filter", searchModelsReq.Filter, `String filter condition, like "name LIKE 'my-model-name'".`) + cmd.Flags().IntVar(&searchModelsReq.MaxResults, "max-results", searchModelsReq.MaxResults, `Maximum number of models desired.`) // TODO: array: order_by - searchModelsCmd.Flags().StringVar(&searchModelsReq.PageToken, "page-token", searchModelsReq.PageToken, `Pagination token to go to the next page based on a previous search query.`) + cmd.Flags().StringVar(&searchModelsReq.PageToken, "page-token", searchModelsReq.PageToken, `Pagination token to go to the next page based on a previous search query.`) -} - -var searchModelsCmd = &cobra.Command{ - Use: "search-models", - Short: `Search models.`, - Long: `Search models. + cmd.Use = "search-models" + cmd.Short = `Search models.` + cmd.Long = `Search models. - Search for registered models based on the specified __filter__.`, + Search for registered models based on the specified __filter__.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1204,40 +1854,62 @@ var searchModelsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range searchModelsOverrides { + fn(cmd, &searchModelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSearchModels()) + }) } // start set-model-tag command -var setModelTagReq ml.SetModelTagRequest -var setModelTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(setModelTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setModelTagOverrides []func( + *cobra.Command, + *ml.SetModelTagRequest, +) + +func newSetModelTag() *cobra.Command { + cmd := &cobra.Command{} + + var setModelTagReq ml.SetModelTagRequest + var setModelTagJson flags.JsonFlag + // TODO: short flags - setModelTagCmd.Flags().Var(&setModelTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setModelTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var setModelTagCmd = &cobra.Command{ - Use: "set-model-tag NAME KEY VALUE", - Short: `Set a tag.`, - Long: `Set a tag. + cmd.Use = "set-model-tag NAME KEY VALUE" + cmd.Short = `Set a tag.` + cmd.Long = `Set a tag. - Sets a tag on a registered model.`, + Sets a tag on a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1257,40 +1929,62 @@ var setModelTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setModelTagOverrides { + fn(cmd, &setModelTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetModelTag()) + }) } // start set-model-version-tag command -var setModelVersionTagReq ml.SetModelVersionTagRequest -var setModelVersionTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(setModelVersionTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setModelVersionTagOverrides []func( + *cobra.Command, + *ml.SetModelVersionTagRequest, +) + +func newSetModelVersionTag() *cobra.Command { + cmd := &cobra.Command{} + + var setModelVersionTagReq ml.SetModelVersionTagRequest + var setModelVersionTagJson flags.JsonFlag + // TODO: short flags - setModelVersionTagCmd.Flags().Var(&setModelVersionTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setModelVersionTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var setModelVersionTagCmd = &cobra.Command{ - Use: "set-model-version-tag NAME VERSION KEY VALUE", - Short: `Set a version tag.`, - Long: `Set a version tag. + cmd.Use = "set-model-version-tag NAME VERSION KEY VALUE" + cmd.Short = `Set a version tag.` + cmd.Long = `Set a version tag. 
- Sets a model version tag.`, + Sets a model version tag.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1311,44 +2005,138 @@ var setModelVersionTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setModelVersionTagOverrides { + fn(cmd, &setModelVersionTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetModelVersionTag()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *ml.RegisteredModelPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq ml.RegisteredModelPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions REGISTERED_MODEL_ID" + cmd.Short = `Set registered model permissions.` + cmd.Long = `Set registered model permissions. + + Sets permissions on a registered model. Registered models can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + setPermissionsReq.RegisteredModelId = args[0] + + response, err := w.ModelRegistry.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start test-registry-webhook command -var testRegistryWebhookReq ml.TestRegistryWebhookRequest -var testRegistryWebhookJson flags.JsonFlag -func init() { - Cmd.AddCommand(testRegistryWebhookCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var testRegistryWebhookOverrides []func( + *cobra.Command, + *ml.TestRegistryWebhookRequest, +) + +func newTestRegistryWebhook() *cobra.Command { + cmd := &cobra.Command{} + + var testRegistryWebhookReq ml.TestRegistryWebhookRequest + var testRegistryWebhookJson flags.JsonFlag + // TODO: short flags - testRegistryWebhookCmd.Flags().Var(&testRegistryWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&testRegistryWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) - testRegistryWebhookCmd.Flags().Var(&testRegistryWebhookReq.Event, "event", `If event is specified, the test trigger uses the specified event.`) + cmd.Flags().Var(&testRegistryWebhookReq.Event, "event", `If event is specified, the test trigger uses the specified event.`) -} - -var testRegistryWebhookCmd = &cobra.Command{ - Use: "test-registry-webhook ID", - Short: `Test a webhook.`, - Long: `Test a webhook. + cmd.Use = "test-registry-webhook ID" + cmd.Short = `Test a webhook.` + cmd.Long = `Test a webhook. **NOTE:** This endpoint is in Public Preview. - Tests a registry webhook.`, + Tests a registry webhook.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1366,46 +2154,68 @@ var testRegistryWebhookCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range testRegistryWebhookOverrides { + fn(cmd, &testRegistryWebhookReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newTestRegistryWebhook()) + }) } // start transition-stage command -var transitionStageReq ml.TransitionModelVersionStageDatabricks -var transitionStageJson flags.JsonFlag -func init() { - Cmd.AddCommand(transitionStageCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var transitionStageOverrides []func( + *cobra.Command, + *ml.TransitionModelVersionStageDatabricks, +) + +func newTransitionStage() *cobra.Command { + cmd := &cobra.Command{} + + var transitionStageReq ml.TransitionModelVersionStageDatabricks + var transitionStageJson flags.JsonFlag + // TODO: short flags - transitionStageCmd.Flags().Var(&transitionStageJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&transitionStageJson, "json", `either inline JSON string or @path/to/file.json with request body`) - transitionStageCmd.Flags().StringVar(&transitionStageReq.Comment, "comment", transitionStageReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&transitionStageReq.Comment, "comment", transitionStageReq.Comment, `User-provided comment on the action.`) -} - -var transitionStageCmd = &cobra.Command{ - Use: "transition-stage NAME VERSION STAGE ARCHIVE_EXISTING_VERSIONS", - Short: `Transition a stage.`, - Long: `Transition a stage. + cmd.Use = "transition-stage NAME VERSION STAGE ARCHIVE_EXISTING_VERSIONS" + cmd.Short = `Transition a stage.` + cmd.Long = `Transition a stage. Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the transition to be recorded.", - [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage`, + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1432,40 +2242,62 @@ var transitionStageCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range transitionStageOverrides { + fn(cmd, &transitionStageReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newTransitionStage()) + }) } // start update-comment command -var updateCommentReq ml.UpdateComment -var updateCommentJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCommentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateCommentOverrides []func( + *cobra.Command, + *ml.UpdateComment, +) + +func newUpdateComment() *cobra.Command { + cmd := &cobra.Command{} + + var updateCommentReq ml.UpdateComment + var updateCommentJson flags.JsonFlag + // TODO: short flags - updateCommentCmd.Flags().Var(&updateCommentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateCommentJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateCommentCmd = &cobra.Command{ - Use: "update-comment ID COMMENT", - Short: `Update a comment.`, - Long: `Update a comment. + cmd.Use = "update-comment ID COMMENT" + cmd.Short = `Update a comment.` + cmd.Long = `Update a comment. - Post an edit to a comment on a model version.`, + Post an edit to a comment on a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1484,42 +2316,64 @@ var updateCommentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateCommentOverrides { + fn(cmd, &updateCommentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateComment()) + }) } // start update-model command -var updateModelReq ml.UpdateModelRequest -var updateModelJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateModelOverrides []func( + *cobra.Command, + *ml.UpdateModelRequest, +) + +func newUpdateModel() *cobra.Command { + cmd := &cobra.Command{} + + var updateModelReq ml.UpdateModelRequest + var updateModelJson flags.JsonFlag + // TODO: short flags - updateModelCmd.Flags().Var(&updateModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateModelCmd.Flags().StringVar(&updateModelReq.Description, "description", updateModelReq.Description, `If provided, updates the description for this registered_model.`) + cmd.Flags().StringVar(&updateModelReq.Description, "description", updateModelReq.Description, `If provided, updates the description for this registered_model.`) -} - -var updateModelCmd = &cobra.Command{ - Use: "update-model NAME", - Short: `Update model.`, - Long: `Update model. + cmd.Use = "update-model NAME" + cmd.Short = `Update model.` + cmd.Long = `Update model. 
- Updates a registered model.`, + Updates a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1537,42 +2391,64 @@ var updateModelCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateModelOverrides { + fn(cmd, &updateModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateModel()) + }) } // start update-model-version command -var updateModelVersionReq ml.UpdateModelVersionRequest -var updateModelVersionJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateModelVersionCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateModelVersionOverrides []func( + *cobra.Command, + *ml.UpdateModelVersionRequest, +) + +func newUpdateModelVersion() *cobra.Command { + cmd := &cobra.Command{} + + var updateModelVersionReq ml.UpdateModelVersionRequest + var updateModelVersionJson flags.JsonFlag + // TODO: short flags - updateModelVersionCmd.Flags().Var(&updateModelVersionJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateModelVersionJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateModelVersionCmd.Flags().StringVar(&updateModelVersionReq.Description, "description", updateModelVersionReq.Description, `If provided, updates the description for this registered_model.`) + cmd.Flags().StringVar(&updateModelVersionReq.Description, "description", updateModelVersionReq.Description, `If provided, updates the description for this registered_model.`) -} - -var updateModelVersionCmd = &cobra.Command{ - Use: "update-model-version NAME VERSION", - Short: `Update model version.`, - Long: `Update model version. + cmd.Use = "update-model-version NAME VERSION" + cmd.Short = `Update model version.` + cmd.Long = `Update model version. 
- Updates the model version.`, + Updates the model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1591,48 +2467,142 @@ var updateModelVersionCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateModelVersionOverrides { + fn(cmd, &updateModelVersionReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateModelVersion()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *ml.RegisteredModelPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq ml.RegisteredModelPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions REGISTERED_MODEL_ID" + cmd.Short = `Update registered model permissions.` + cmd.Long = `Update registered model permissions. + + Updates the permissions on a registered model. Registered models can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + updatePermissionsReq.RegisteredModelId = args[0] + + response, err := w.ModelRegistry.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // start update-webhook command -var updateWebhookReq ml.UpdateRegistryWebhook -var updateWebhookJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateWebhookCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateWebhookOverrides []func( + *cobra.Command, + *ml.UpdateRegistryWebhook, +) + +func newUpdateWebhook() *cobra.Command { + cmd := &cobra.Command{} + + var updateWebhookReq ml.UpdateRegistryWebhook + var updateWebhookJson flags.JsonFlag + // TODO: short flags - updateWebhookCmd.Flags().Var(&updateWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateWebhookCmd.Flags().StringVar(&updateWebhookReq.Description, "description", updateWebhookReq.Description, `User-specified description for the webhook.`) + cmd.Flags().StringVar(&updateWebhookReq.Description, "description", updateWebhookReq.Description, `User-specified description for the webhook.`) // TODO: array: events // TODO: complex arg: http_url_spec // TODO: complex arg: job_spec - updateWebhookCmd.Flags().Var(&updateWebhookReq.Status, "status", `This describes an enum.`) + cmd.Flags().Var(&updateWebhookReq.Status, "status", `This describes an enum.`) -} - -var updateWebhookCmd = &cobra.Command{ - Use: "update-webhook ID", - Short: `Update a webhook.`, - Long: `Update a webhook. + cmd.Use = "update-webhook ID" + cmd.Short = `Update a webhook.` + cmd.Long = `Update a webhook. **NOTE:** This endpoint is in Public Preview. - Updates a registry webhook.`, + Updates a registry webhook.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1650,10 +2620,24 @@ var updateWebhookCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateWebhookOverrides { + fn(cmd, &updateWebhookReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateWebhook()) + }) } // end service ModelRegistry diff --git a/cmd/workspace/model-versions/model-versions.go b/cmd/workspace/model-versions/model-versions.go new file mode 100755 index 000000000..f62cddab0 --- /dev/null +++ b/cmd/workspace/model-versions/model-versions.go @@ -0,0 +1,400 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package model_versions + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "model-versions", + Short: `Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.`, + Long: `Databricks provides a hosted version of MLflow Model Registry in Unity + Catalog. Models in Unity Catalog provide centralized access control, auditing, + lineage, and discovery of ML models across Databricks workspaces. + + This API reference documents the REST endpoints for managing model versions in + Unity Catalog. For more details, see the [registered models API + docs](/api/workspace/registeredmodels).`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteModelVersionRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteModelVersionRequest + + // TODO: short flags + + cmd.Use = "delete FULL_NAME VERSION" + cmd.Short = `Delete a Model Version.` + cmd.Long = `Delete a Model Version. + + Deletes a model version from the specified registered model. Any aliases + assigned to the model version will also be deleted. + + The caller must be a metastore admin or an owner of the parent registered + model. For the latter case, the caller must also be the owner or have the + **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + privilege on the parent schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.FullName = args[0] + _, err = fmt.Sscan(args[1], &deleteReq.Version) + if err != nil { + return fmt.Errorf("invalid VERSION: %s", args[1]) + } + + err = w.ModelVersions.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *catalog.GetModelVersionRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetModelVersionRequest + + // TODO: short flags + + cmd.Use = "get FULL_NAME VERSION" + cmd.Short = `Get a Model Version.` + cmd.Long = `Get a Model Version. + + Get a model version. + + The caller must be a metastore admin or an owner of (or have the **EXECUTE** + privilege on) the parent registered model. For the latter case, the caller + must also be the owner or have the **USE_CATALOG** privilege on the parent + catalog and the **USE_SCHEMA** privilege on the parent schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getReq.FullName = args[0] + _, err = fmt.Sscan(args[1], &getReq.Version) + if err != nil { + return fmt.Errorf("invalid VERSION: %s", args[1]) + } + + response, err := w.ModelVersions.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-by-alias command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getByAliasOverrides []func( + *cobra.Command, + *catalog.GetByAliasRequest, +) + +func newGetByAlias() *cobra.Command { + cmd := &cobra.Command{} + + var getByAliasReq catalog.GetByAliasRequest + + // TODO: short flags + + cmd.Use = "get-by-alias FULL_NAME ALIAS" + cmd.Short = `Get Model Version By Alias.` + cmd.Long = `Get Model Version By Alias. + + Get a model version by alias. + + The caller must be a metastore admin or an owner of (or have the **EXECUTE** + privilege on) the registered model. For the latter case, the caller must also + be the owner or have the **USE_CATALOG** privilege on the parent catalog and + the **USE_SCHEMA** privilege on the parent schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getByAliasReq.FullName = args[0] + getByAliasReq.Alias = args[1] + + response, err := w.ModelVersions.GetByAlias(ctx, getByAliasReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getByAliasOverrides { + fn(cmd, &getByAliasReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetByAlias()) + }) +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListModelVersionsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListModelVersionsRequest + + // TODO: short flags + + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Max number of model versions to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque token to send for the next page of results (pagination).`) + + cmd.Use = "list FULL_NAME" + cmd.Short = `List Model Versions.` + cmd.Long = `List Model Versions. + + List model versions. You can list model versions under a particular schema, or + list all model versions in the current metastore. + + The returned models are filtered based on the privileges of the calling user. + For example, the metastore admin is able to list all the model versions. A + regular user needs to be the owner or have the **EXECUTE** privilege on the + parent registered model to recieve the model versions in the response. For the + latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema. + + There is no guarantee of a specific ordering of the elements in the response.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + listReq.FullName = args[0] + + response, err := w.ModelVersions.ListAll(ctx, listReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateModelVersionRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateModelVersionRequest + + // TODO: short flags + + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `The comment attached to the model version.`) + + cmd.Use = "update FULL_NAME VERSION" + cmd.Short = `Update a Model Version.` + cmd.Long = `Update a Model Version. + + Updates the specified model version. + + The caller must be a metastore admin or an owner of the parent registered + model. 
For the latter case, the caller must also be the owner or have the + **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + privilege on the parent schema. + + Currently only the comment of the model version can be updated.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + updateReq.FullName = args[0] + _, err = fmt.Sscan(args[1], &updateReq.Version) + if err != nil { + return fmt.Errorf("invalid VERSION: %s", args[1]) + } + + response, err := w.ModelVersions.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service ModelVersions diff --git a/cmd/workspace/permissions/permissions.go b/cmd/workspace/permissions/permissions.go index c92501207..c168a1a48 100755 --- a/cmd/workspace/permissions/permissions.go +++ b/cmd/workspace/permissions/permissions.go @@ -10,40 +10,107 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "permissions", - Short: `Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.`, - Long: `Permissions API are used to create read, write, edit, update and manage access - for various users on different objects and endpoints.`, - Annotations: map[string]string{ - "package": "iam", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "permissions", + Short: `Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.`, + Long: `Permissions API are used to create read, write, edit, update and manage access + for various users on different objects and endpoints. + + * **[Cluster permissions](:service:clusters)** — Manage which users can + manage, restart, or attach to clusters. + + * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which + users can use cluster policies. + + * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage + which users can view, manage, run, cancel, or own a Delta Live Tables + pipeline. + + * **[Job permissions](:service:jobs)** — Manage which users can view, + manage, trigger, cancel, or own a job. + + * **[MLflow experiment permissions](:service:experiments)** — Manage which + users can read, edit, or manage MLflow experiments. + + * **[MLflow registered model permissions](:service:modelregistry)** — Manage + which users can read, edit, or manage MLflow registered models. + + * **[Password permissions](:service:users)** — Manage which users can use + password login when SSO is enabled. 
+ + * **[Instance Pool permissions](:service:instancepools)** — Manage which + users can manage or attach to pools. + + * **[Repo permissions](repos)** — Manage which users can read, run, edit, or + manage a repo. + + * **[Serving endpoint permissions](:service:servingendpoints)** — Manage + which users can view, query, or manage a serving endpoint. + + * **[SQL warehouse permissions](:service:warehouses)** — Manage which users + can use or manage SQL warehouses. + + * **[Token permissions](:service:tokenmanagement)** — Manage which users can + create or use tokens. + + * **[Workspace object permissions](:service:workspace)** — Manage which + users can read, run, edit, or manage directories, files, and notebooks. + + For the mapping of the required permissions for specific actions or abilities + and other important information, see [Access Control]. + + [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html`, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get command -var getReq iam.GetPermissionRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetPermissionRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetPermissionRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID", - Short: `Get object permissions.`, - Long: `Get object permissions. + cmd.Use = "get REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" + cmd.Short = `Get object permissions.` + cmd.Long = `Get object permissions. - Gets the permission of an object. Objects can inherit permissions from their - parent objects or root objects.`, + Gets the permissions of an object. Objects can inherit permissions from their + parent objects or root object.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -55,35 +122,57 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start get-permission-levels command -var getPermissionLevelsReq iam.GetPermissionLevelsRequest -func init() { - Cmd.AddCommand(getPermissionLevelsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionLevelsOverrides []func( + *cobra.Command, + *iam.GetPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq iam.GetPermissionLevelsRequest + // TODO: short flags -} - -var getPermissionLevelsCmd = &cobra.Command{ - Use: "get-permission-levels REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID", - Short: `Get permission levels.`, - Long: `Get permission levels. + cmd.Use = "get-permission-levels REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" + cmd.Short = `Get object permission levels.` + cmd.Long = `Get object permission levels. - Gets the permission levels that a user can have on an object.`, + Gets the permission levels that a user can have on an object.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -95,40 +184,62 @@ var getPermissionLevelsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) } // start set command -var setReq iam.PermissionsRequest -var setJson flags.JsonFlag -func init() { - Cmd.AddCommand(setCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setOverrides []func( + *cobra.Command, + *iam.PermissionsRequest, +) + +func newSet() *cobra.Command { + cmd := &cobra.Command{} + + var setReq iam.PermissionsRequest + var setJson flags.JsonFlag + // TODO: short flags - setCmd.Flags().Var(&setJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: access_control_list -} - -var setCmd = &cobra.Command{ - Use: "set REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID", - Short: `Set permissions.`, - Long: `Set permissions. + cmd.Use = "set REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" + cmd.Short = `Set object permissions.` + cmd.Long = `Set object permissions. - Sets permissions on object. Objects can inherit permissions from their parent - objects and root objects.`, + Sets permissions on an object. 
Objects can inherit permissions from their + parent objects or root object.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -141,44 +252,67 @@ var setCmd = &cobra.Command{ setReq.RequestObjectType = args[0] setReq.RequestObjectId = args[1] - err = w.Permissions.Set(ctx, setReq) + response, err := w.Permissions.Set(ctx, setReq) if err != nil { return err } - return nil - }, + return cmdio.Render(ctx, response) + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setOverrides { + fn(cmd, &setReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSet()) + }) } // start update command -var updateReq iam.PermissionsRequest -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.PermissionsRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.PermissionsRequest + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: access_control_list -} - -var updateCmd = &cobra.Command{ - Use: "update REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID", - Short: `Update permission.`, - Long: `Update permission. + cmd.Use = "update REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" + cmd.Short = `Update object permissions.` + cmd.Long = `Update object permissions. - Updates the permissions on an object.`, + Updates the permissions on an object. Objects can inherit permissions from + their parent objects or root object.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -191,15 +325,29 @@ var updateCmd = &cobra.Command{ updateReq.RequestObjectType = args[0] updateReq.RequestObjectId = args[1] - err = w.Permissions.Update(ctx, updateReq) + response, err := w.Permissions.Update(ctx, updateReq) if err != nil { return err } - return nil - }, + return cmdio.Render(ctx, response) + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Permissions diff --git a/cmd/workspace/pipelines/pipelines.go b/cmd/workspace/pipelines/pipelines.go index 8d6ffff14..10bcc226e 100755 --- a/cmd/workspace/pipelines/pipelines.go +++ b/cmd/workspace/pipelines/pipelines.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "pipelines", - Short: `The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.`, - Long: `The Delta Live Tables API allows you to create, edit, delete, start, and view +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "pipelines", + Short: `The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.`, + Long: `The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines. Delta Live Tables is a framework for building reliable, maintainable, and @@ -30,59 +35,57 @@ var Cmd = &cobra.Command{ quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations.`, - Annotations: map[string]string{ - "package": "pipelines", - }, + GroupID: "pipelines", + Annotations: map[string]string{ + "package": "pipelines", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq pipelines.CreatePipeline -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
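// The same mechanism works at the group level through the cmdOverrides slice
// declared near the top of this file: a curated file in this directory can
// attach extra hand-written subcommands alongside the generated ones. A
// minimal sketch (newDeploy is a hypothetical, hand-written constructor):
//
//	func init() {
//		cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
//			cmd.AddCommand(newDeploy())
//		})
//	}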
+var createOverrides []func( + *cobra.Command, + *pipelines.CreatePipeline, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq pipelines.CreatePipeline + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.AllowDuplicateNames, "allow-duplicate-names", createReq.AllowDuplicateNames, `If false, deployment will fail if name conflicts with that of another pipeline.`) - createCmd.Flags().StringVar(&createReq.Catalog, "catalog", createReq.Catalog, `A catalog in Unity Catalog to publish data from this pipeline to.`) - createCmd.Flags().StringVar(&createReq.Channel, "channel", createReq.Channel, `DLT Release Channel that specifies which version to use.`) - // TODO: array: clusters - // TODO: map via StringToStringVar: configuration - createCmd.Flags().BoolVar(&createReq.Continuous, "continuous", createReq.Continuous, `Whether the pipeline is continuous or triggered.`) - createCmd.Flags().BoolVar(&createReq.Development, "development", createReq.Development, `Whether the pipeline is in Development mode.`) - createCmd.Flags().BoolVar(&createReq.DryRun, "dry-run", createReq.DryRun, ``) - createCmd.Flags().StringVar(&createReq.Edition, "edition", createReq.Edition, `Pipeline product edition.`) - // TODO: complex arg: filters - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Unique identifier for this pipeline.`) - // TODO: array: libraries - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Friendly identifier for this pipeline.`) - createCmd.Flags().BoolVar(&createReq.Photon, "photon", createReq.Photon, `Whether Photon is enabled for this pipeline.`) - createCmd.Flags().BoolVar(&createReq.Serverless, "serverless", createReq.Serverless, `Whether serverless compute is enabled for this pipeline.`) - createCmd.Flags().StringVar(&createReq.Storage, "storage", createReq.Storage, `DBFS root directory for storing checkpoints and tables.`) - createCmd.Flags().StringVar(&createReq.Target, "target", createReq.Target, `Target schema (database) to add tables in this pipeline to.`) - // TODO: complex arg: trigger - -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a pipeline.`, - Long: `Create a pipeline. + cmd.Use = "create" + cmd.Short = `Create a pipeline.` + cmd.Long = `Create a pipeline. Creates a new data processing pipeline based on the requested configuration. 
- If successful, this method returns the ID of the new pipeline.`, + If successful, this method returns the ID of the new pipeline.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -92,6 +95,7 @@ var createCmd = &cobra.Command{ return err } } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.Pipelines.Create(ctx, createReq) @@ -99,31 +103,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq pipelines.DeletePipelineRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *pipelines.DeletePipelineRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq pipelines.DeletePipelineRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete PIPELINE_ID", - Short: `Delete a pipeline.`, - Long: `Delete a pipeline. + cmd.Use = "delete PIPELINE_ID" + cmd.Short = `Delete a pipeline.` + cmd.Long = `Delete a pipeline. - Deletes a pipeline.`, + Deletes a pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -151,35 +176,55 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq pipelines.GetPipelineRequest -var getSkipWait bool -var getTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *pipelines.GetPipelineRequest, +) -func init() { - Cmd.AddCommand(getCmd) +func newGet() *cobra.Command { + cmd := &cobra.Command{} - getCmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) - getCmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var getReq pipelines.GetPipelineRequest + + var getSkipWait bool + var getTimeout time.Duration + + cmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} + cmd.Use = "get PIPELINE_ID" + cmd.Short = `Get a pipeline.` + cmd.Long = `Get a pipeline.` -var getCmd = &cobra.Command{ - Use: "get PIPELINE_ID", - Short: `Get a pipeline.`, - Long: `Get a pipeline.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -207,35 +252,204 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *pipelines.GetPipelinePermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq pipelines.GetPipelinePermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels PIPELINE_ID" + cmd.Short = `Get pipeline permission levels.` + cmd.Long = `Get pipeline permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline for which to get or manage permissions") + } + getPermissionLevelsReq.PipelineId = args[0] + + response, err := w.Pipelines.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *pipelines.GetPipelinePermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq pipelines.GetPipelinePermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions PIPELINE_ID" + cmd.Short = `Get pipeline permissions.` + cmd.Long = `Get pipeline permissions. + + Gets the permissions of a pipeline. Pipelines can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline for which to get or manage permissions") + } + getPermissionsReq.PipelineId = args[0] + + response, err := w.Pipelines.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start get-update command -var getUpdateReq pipelines.GetUpdateRequest -func init() { - Cmd.AddCommand(getUpdateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getUpdateOverrides []func( + *cobra.Command, + *pipelines.GetUpdateRequest, +) + +func newGetUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var getUpdateReq pipelines.GetUpdateRequest + // TODO: short flags -} - -var getUpdateCmd = &cobra.Command{ - Use: "get-update PIPELINE_ID UPDATE_ID", - Short: `Get a pipeline update.`, - Long: `Get a pipeline update. + cmd.Use = "get-update PIPELINE_ID UPDATE_ID" + cmd.Short = `Get a pipeline update.` + cmd.Long = `Get a pipeline update. - Gets an update from an active pipeline.`, + Gets an update from an active pipeline.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -247,38 +461,59 @@ var getUpdateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getUpdateOverrides { + fn(cmd, &getUpdateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetUpdate()) + }) } // start list-pipeline-events command -var listPipelineEventsReq pipelines.ListPipelineEventsRequest -var listPipelineEventsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listPipelineEventsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listPipelineEventsOverrides []func( + *cobra.Command, + *pipelines.ListPipelineEventsRequest, +) + +func newListPipelineEvents() *cobra.Command { + cmd := &cobra.Command{} + + var listPipelineEventsReq pipelines.ListPipelineEventsRequest + var listPipelineEventsJson flags.JsonFlag + // TODO: short flags - listPipelineEventsCmd.Flags().Var(&listPipelineEventsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listPipelineEventsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listPipelineEventsCmd.Flags().StringVar(&listPipelineEventsReq.Filter, "filter", listPipelineEventsReq.Filter, `Criteria to select a subset of results, expressed using a SQL-like syntax.`) - listPipelineEventsCmd.Flags().IntVar(&listPipelineEventsReq.MaxResults, "max-results", listPipelineEventsReq.MaxResults, `Max number of entries to return in a single page.`) + cmd.Flags().StringVar(&listPipelineEventsReq.Filter, "filter", listPipelineEventsReq.Filter, `Criteria to select a subset of results, expressed using a SQL-like syntax.`) + cmd.Flags().IntVar(&listPipelineEventsReq.MaxResults, "max-results", listPipelineEventsReq.MaxResults, `Max number of entries to return in a single page.`) // TODO: array: order_by - listPipelineEventsCmd.Flags().StringVar(&listPipelineEventsReq.PageToken, "page-token", listPipelineEventsReq.PageToken, `Page token returned by previous call.`) + cmd.Flags().StringVar(&listPipelineEventsReq.PageToken, "page-token", listPipelineEventsReq.PageToken, `Page token returned by previous call.`) -} - -var listPipelineEventsCmd = &cobra.Command{ - Use: "list-pipeline-events PIPELINE_ID", - Short: `List pipeline events.`, - Long: `List pipeline events. + cmd.Use = "list-pipeline-events PIPELINE_ID" + cmd.Short = `List pipeline events.` + cmd.Long = `List pipeline events. - Retrieves events for a pipeline.`, + Retrieves events for a pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -312,45 +547,67 @@ var listPipelineEventsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listPipelineEventsOverrides { + fn(cmd, &listPipelineEventsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListPipelineEvents()) + }) } // start list-pipelines command -var listPipelinesReq pipelines.ListPipelinesRequest -var listPipelinesJson flags.JsonFlag -func init() { - Cmd.AddCommand(listPipelinesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listPipelinesOverrides []func( + *cobra.Command, + *pipelines.ListPipelinesRequest, +) + +func newListPipelines() *cobra.Command { + cmd := &cobra.Command{} + + var listPipelinesReq pipelines.ListPipelinesRequest + var listPipelinesJson flags.JsonFlag + // TODO: short flags - listPipelinesCmd.Flags().Var(&listPipelinesJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listPipelinesJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listPipelinesCmd.Flags().StringVar(&listPipelinesReq.Filter, "filter", listPipelinesReq.Filter, `Select a subset of results based on the specified criteria.`) - listPipelinesCmd.Flags().IntVar(&listPipelinesReq.MaxResults, "max-results", listPipelinesReq.MaxResults, `The maximum number of entries to return in a single page.`) + cmd.Flags().StringVar(&listPipelinesReq.Filter, "filter", listPipelinesReq.Filter, `Select a subset of results based on the specified criteria.`) + cmd.Flags().IntVar(&listPipelinesReq.MaxResults, "max-results", listPipelinesReq.MaxResults, `The maximum number of entries to return in a single page.`) // TODO: array: order_by - listPipelinesCmd.Flags().StringVar(&listPipelinesReq.PageToken, "page-token", listPipelinesReq.PageToken, `Page token returned by previous call.`) + cmd.Flags().StringVar(&listPipelinesReq.PageToken, "page-token", listPipelinesReq.PageToken, `Page token returned by previous call.`) -} - -var listPipelinesCmd = &cobra.Command{ - Use: "list-pipelines", - Short: `List pipelines.`, - Long: `List pipelines. + cmd.Use = "list-pipelines" + cmd.Short = `List pipelines.` + cmd.Long = `List pipelines. - Lists pipelines defined in the Delta Live Tables system.`, + Lists pipelines defined in the Delta Live Tables system.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -367,35 +624,56 @@ var listPipelinesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listPipelinesOverrides { + fn(cmd, &listPipelinesReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListPipelines()) + }) } // start list-updates command -var listUpdatesReq pipelines.ListUpdatesRequest -func init() { - Cmd.AddCommand(listUpdatesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listUpdatesOverrides []func( + *cobra.Command, + *pipelines.ListUpdatesRequest, +) + +func newListUpdates() *cobra.Command { + cmd := &cobra.Command{} + + var listUpdatesReq pipelines.ListUpdatesRequest + // TODO: short flags - listUpdatesCmd.Flags().IntVar(&listUpdatesReq.MaxResults, "max-results", listUpdatesReq.MaxResults, `Max number of entries to return in a single page.`) - listUpdatesCmd.Flags().StringVar(&listUpdatesReq.PageToken, "page-token", listUpdatesReq.PageToken, `Page token returned by previous call.`) - listUpdatesCmd.Flags().StringVar(&listUpdatesReq.UntilUpdateId, "until-update-id", listUpdatesReq.UntilUpdateId, `If present, returns updates until and including this update_id.`) + cmd.Flags().IntVar(&listUpdatesReq.MaxResults, "max-results", listUpdatesReq.MaxResults, `Max number of entries to return in a single page.`) + cmd.Flags().StringVar(&listUpdatesReq.PageToken, "page-token", listUpdatesReq.PageToken, `Page token returned by previous call.`) + cmd.Flags().StringVar(&listUpdatesReq.UntilUpdateId, "until-update-id", listUpdatesReq.UntilUpdateId, `If present, returns updates until and including this update_id.`) -} - -var listUpdatesCmd = &cobra.Command{ - Use: "list-updates PIPELINE_ID", - Short: `List pipeline updates.`, - Long: `List pipeline updates. + cmd.Use = "list-updates PIPELINE_ID" + cmd.Short = `List pipeline updates.` + cmd.Long = `List pipeline updates. - List updates for an active pipeline.`, + List updates for an active pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -423,37 +701,57 @@ var listUpdatesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listUpdatesOverrides { + fn(cmd, &listUpdatesReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListUpdates()) + }) } // start reset command -var resetReq pipelines.ResetRequest -var resetSkipWait bool -var resetTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
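// Because the per-command overrides run only after the generated flags are
// registered, a curated file could also adjust those flags. A hedged sketch
// (hiding --timeout here is purely illustrative):
//
//	func init() {
//		resetOverrides = append(resetOverrides, func(cmd *cobra.Command, req *pipelines.ResetRequest) {
//			// Hide the generated --timeout flag from help output.
//			_ = cmd.Flags().MarkHidden("timeout")
//		})
//	}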
+var resetOverrides []func( + *cobra.Command, + *pipelines.ResetRequest, +) -func init() { - Cmd.AddCommand(resetCmd) +func newReset() *cobra.Command { + cmd := &cobra.Command{} - resetCmd.Flags().BoolVar(&resetSkipWait, "no-wait", resetSkipWait, `do not wait to reach RUNNING state`) - resetCmd.Flags().DurationVar(&resetTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var resetReq pipelines.ResetRequest + + var resetSkipWait bool + var resetTimeout time.Duration + + cmd.Flags().BoolVar(&resetSkipWait, "no-wait", resetSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&resetTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} - -var resetCmd = &cobra.Command{ - Use: "reset PIPELINE_ID", - Short: `Reset a pipeline.`, - Long: `Reset a pipeline. + cmd.Use = "reset PIPELINE_ID" + cmd.Short = `Reset a pipeline.` + cmd.Long = `Reset a pipeline. - Resets a pipeline.`, + Resets a pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -493,38 +791,143 @@ var resetCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resetOverrides { + fn(cmd, &resetReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReset()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *pipelines.PipelinePermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq pipelines.PipelinePermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions PIPELINE_ID" + cmd.Short = `Set pipeline permissions.` + cmd.Long = `Set pipeline permissions. + + Sets permissions on a pipeline. Pipelines can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. 
Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline for which to get or manage permissions") + } + setPermissionsReq.PipelineId = args[0] + + response, err := w.Pipelines.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start start-update command -var startUpdateReq pipelines.StartUpdate -var startUpdateJson flags.JsonFlag -func init() { - Cmd.AddCommand(startUpdateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var startUpdateOverrides []func( + *cobra.Command, + *pipelines.StartUpdate, +) + +func newStartUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var startUpdateReq pipelines.StartUpdate + var startUpdateJson flags.JsonFlag + // TODO: short flags - startUpdateCmd.Flags().Var(&startUpdateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&startUpdateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - startUpdateCmd.Flags().Var(&startUpdateReq.Cause, "cause", ``) - startUpdateCmd.Flags().BoolVar(&startUpdateReq.FullRefresh, "full-refresh", startUpdateReq.FullRefresh, `If true, this update will reset all tables before running.`) + cmd.Flags().Var(&startUpdateReq.Cause, "cause", ``) + cmd.Flags().BoolVar(&startUpdateReq.FullRefresh, "full-refresh", startUpdateReq.FullRefresh, `If true, this update will reset all tables before running.`) // TODO: array: full_refresh_selection // TODO: array: refresh_selection -} - -var startUpdateCmd = &cobra.Command{ - Use: "start-update PIPELINE_ID", - Short: `Queue a pipeline update.`, - Long: `Queue a pipeline update. + cmd.Use = "start-update PIPELINE_ID" + cmd.Short = `Queue a pipeline update.` + cmd.Long = `Queue a pipeline update. - Starts or queues a pipeline update.`, + Starts or queues a pipeline update.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -558,37 +961,57 @@ var startUpdateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range startUpdateOverrides { + fn(cmd, &startUpdateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStartUpdate()) + }) } // start stop command -var stopReq pipelines.StopRequest -var stopSkipWait bool -var stopTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var stopOverrides []func( + *cobra.Command, + *pipelines.StopRequest, +) -func init() { - Cmd.AddCommand(stopCmd) +func newStop() *cobra.Command { + cmd := &cobra.Command{} - stopCmd.Flags().BoolVar(&stopSkipWait, "no-wait", stopSkipWait, `do not wait to reach IDLE state`) - stopCmd.Flags().DurationVar(&stopTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach IDLE state`) + var stopReq pipelines.StopRequest + + var stopSkipWait bool + var stopTimeout time.Duration + + cmd.Flags().BoolVar(&stopSkipWait, "no-wait", stopSkipWait, `do not wait to reach IDLE state`) + cmd.Flags().DurationVar(&stopTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach IDLE state`) // TODO: short flags -} - -var stopCmd = &cobra.Command{ - Use: "stop PIPELINE_ID", - Short: `Stop a pipeline.`, - Long: `Stop a pipeline. + cmd.Use = "stop PIPELINE_ID" + cmd.Short = `Stop a pipeline.` + cmd.Long = `Stop a pipeline. - Stops a pipeline.`, + Stops a pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -628,53 +1051,74 @@ var stopCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range stopOverrides { + fn(cmd, &stopReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStop()) + }) } // start update command -var updateReq pipelines.EditPipeline -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *pipelines.EditPipeline, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq pipelines.EditPipeline + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.AllowDuplicateNames, "allow-duplicate-names", updateReq.AllowDuplicateNames, `If false, deployment will fail if name has changed and conflicts the name of another pipeline.`) - updateCmd.Flags().StringVar(&updateReq.Catalog, "catalog", updateReq.Catalog, `A catalog in Unity Catalog to publish data from this pipeline to.`) - updateCmd.Flags().StringVar(&updateReq.Channel, "channel", updateReq.Channel, `DLT Release Channel that specifies which version to use.`) + cmd.Flags().BoolVar(&updateReq.AllowDuplicateNames, "allow-duplicate-names", updateReq.AllowDuplicateNames, `If false, deployment will fail if name has changed and conflicts the name of another pipeline.`) + cmd.Flags().StringVar(&updateReq.Catalog, "catalog", updateReq.Catalog, `A catalog in Unity Catalog to publish data from this pipeline to.`) + cmd.Flags().StringVar(&updateReq.Channel, "channel", updateReq.Channel, `DLT Release Channel that specifies which version to use.`) // TODO: array: clusters // TODO: map via StringToStringVar: configuration - updateCmd.Flags().BoolVar(&updateReq.Continuous, "continuous", updateReq.Continuous, `Whether the pipeline is continuous or triggered.`) - updateCmd.Flags().BoolVar(&updateReq.Development, "development", updateReq.Development, `Whether the pipeline is in Development mode.`) - updateCmd.Flags().StringVar(&updateReq.Edition, "edition", updateReq.Edition, `Pipeline product edition.`) - updateCmd.Flags().Int64Var(&updateReq.ExpectedLastModified, "expected-last-modified", updateReq.ExpectedLastModified, `If present, the last-modified time of the pipeline settings before the edit.`) + cmd.Flags().BoolVar(&updateReq.Continuous, "continuous", updateReq.Continuous, `Whether the pipeline is continuous or triggered.`) + cmd.Flags().BoolVar(&updateReq.Development, "development", updateReq.Development, `Whether the pipeline is in Development mode.`) + cmd.Flags().StringVar(&updateReq.Edition, "edition", updateReq.Edition, `Pipeline product edition.`) + cmd.Flags().Int64Var(&updateReq.ExpectedLastModified, "expected-last-modified", updateReq.ExpectedLastModified, `If present, the last-modified time of the pipeline settings before the edit.`) // TODO: complex arg: filters - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Unique identifier for this pipeline.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Unique identifier for this pipeline.`) // TODO: array: libraries - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Friendly identifier for this pipeline.`) - updateCmd.Flags().BoolVar(&updateReq.Photon, "photon", updateReq.Photon, `Whether Photon is enabled for this pipeline.`) - updateCmd.Flags().StringVar(&updateReq.PipelineId, "pipeline-id", updateReq.PipelineId, `Unique identifier for this pipeline.`) - updateCmd.Flags().BoolVar(&updateReq.Serverless, "serverless", updateReq.Serverless, `Whether serverless compute is enabled for this pipeline.`) - updateCmd.Flags().StringVar(&updateReq.Storage, "storage", updateReq.Storage, `DBFS root directory for storing 
checkpoints and tables.`) - updateCmd.Flags().StringVar(&updateReq.Target, "target", updateReq.Target, `Target schema (database) to add tables in this pipeline to.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Friendly identifier for this pipeline.`) + cmd.Flags().BoolVar(&updateReq.Photon, "photon", updateReq.Photon, `Whether Photon is enabled for this pipeline.`) + cmd.Flags().StringVar(&updateReq.PipelineId, "pipeline-id", updateReq.PipelineId, `Unique identifier for this pipeline.`) + cmd.Flags().BoolVar(&updateReq.Serverless, "serverless", updateReq.Serverless, `Whether serverless compute is enabled for this pipeline.`) + cmd.Flags().StringVar(&updateReq.Storage, "storage", updateReq.Storage, `DBFS root directory for storing checkpoints and tables.`) + cmd.Flags().StringVar(&updateReq.Target, "target", updateReq.Target, `Target schema (database) to add tables in this pipeline to.`) // TODO: complex arg: trigger -} - -var updateCmd = &cobra.Command{ - Use: "update PIPELINE_ID", - Short: `Edit a pipeline.`, - Long: `Edit a pipeline. + cmd.Use = "update PIPELINE_ID" + cmd.Short = `Edit a pipeline.` + cmd.Long = `Edit a pipeline. - Updates a pipeline with the supplied configuration.`, + Updates a pipeline with the supplied configuration.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -709,10 +1153,108 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *pipelines.PipelinePermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq pipelines.PipelinePermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions PIPELINE_ID" + cmd.Short = `Update pipeline permissions.` + cmd.Long = `Update pipeline permissions. + + Updates the permissions on a pipeline. 
Pipelines can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline for which to get or manage permissions") + } + updatePermissionsReq.PipelineId = args[0] + + response, err := w.Pipelines.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service Pipelines diff --git a/cmd/workspace/policy-families/policy-families.go b/cmd/workspace/policy-families/policy-families.go index 37b45c8e1..532317f7f 100755 --- a/cmd/workspace/policy-families/policy-families.go +++ b/cmd/workspace/policy-families/policy-families.go @@ -10,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "policy-families", - Short: `View available policy families.`, - Long: `View available policy families. A policy family contains a policy definition +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "policy-families", + Short: `View available policy families.`, + Long: `View available policy families. A policy family contains a policy definition providing best practices for configuring clusters for a particular use case. Databricks manages and provides policy families for several common cluster use @@ -22,30 +27,51 @@ var Cmd = &cobra.Command{ Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a policy family. Cluster policies created using a policy family inherit the policy family's policy definition.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get command -var getReq compute.GetPolicyFamilyRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetPolicyFamilyRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq compute.GetPolicyFamilyRequest + // TODO: short flags -} + cmd.Use = "get POLICY_FAMILY_ID" + cmd.Short = `Get policy family information.` + cmd.Long = `Get policy family information. + + Retrieve the information for an policy family based on its identifier.` -var getCmd = &cobra.Command{ - Use: "get POLICY_FAMILY_ID", + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -56,39 +82,65 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq compute.ListPolicyFamiliesRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *compute.ListPolicyFamiliesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq compute.ListPolicyFamiliesRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().Int64Var(&listReq.MaxResults, "max-results", listReq.MaxResults, `The max number of policy families to return.`) - listCmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`) + cmd.Flags().Int64Var(&listReq.MaxResults, "max-results", listReq.MaxResults, `The max number of policy families to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`) -} + cmd.Use = "list" + cmd.Short = `List policy families.` + cmd.Long = `List policy families. + + Retrieve a list of policy families. 
This API is paginated.` -var listCmd = &cobra.Command{ - Use: "list", + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -105,10 +157,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service PolicyFamilies diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go index 58ed33954..db2a98448 100755 --- a/cmd/workspace/providers/providers.go +++ b/cmd/workspace/providers/providers.go @@ -12,47 +12,71 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "providers", - Short: `Databricks Providers REST API.`, - Long: `Databricks Providers REST API`, - Annotations: map[string]string{ - "package": "sharing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "providers", + Short: `A data provider is an object representing the organization in the real world who shares the data.`, + Long: `A data provider is an object representing the organization in the real world + who shares the data. A provider contains shares which further contain the + shared data.`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sharing.CreateProvider -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
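The typed *Overrides slices generated throughout this diff are the hook points for the hand-maintained overrides.go files that appear later in the change (see cmd/workspace/queries/overrides.go below). As a sketch only, not part of this diff, a curated override in this providers package could register a custom render template for its list command; the template contents here are hypothetical, while listOverrides and cmdio.Heredoc are taken from the surrounding code:

package providers

import (
	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/databricks-sdk-go/service/sharing"
	"github.com/spf13/cobra"
)

// listOverride runs after the generated newList() builds the command,
// so it can adjust help text, annotations, or request defaults.
func listOverride(listCmd *cobra.Command, listReq *sharing.ListProvidersRequest) {
	// Hypothetical output template for `databricks providers list`.
	listCmd.Annotations["template"] = cmdio.Heredoc(`
	{{header "Name"}}	{{header "Owner"}}
	{{range .}}{{.Name|green}}	{{.Owner|cyan}}
	{{end}}`)
}

func init() {
	listOverrides = append(listOverrides, listOverride)
}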
+var createOverrides []func( + *cobra.Command, + *sharing.CreateProvider, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sharing.CreateProvider + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the provider.`) - createCmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the provider.`) + cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME AUTHENTICATION_TYPE", - Short: `Create an auth provider.`, - Long: `Create an auth provider. + cmd.Use = "create NAME AUTHENTICATION_TYPE" + cmd.Short = `Create an auth provider.` + cmd.Long = `Create an auth provider. Creates a new authentication provider minimally based on a name and - authentication type. The caller must be an admin on the metastore.`, + authentication type. The caller must be an admin on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -74,32 +98,53 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sharing.DeleteProviderRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sharing.DeleteProviderRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sharing.DeleteProviderRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a provider.`, - Long: `Delete a provider. + cmd.Use = "delete NAME" + cmd.Short = `Delete a provider.` + cmd.Long = `Delete a provider. 
Deletes an authentication provider, if the caller is a metastore admin or is - the owner of the provider.`, + the owner of the provider.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -127,33 +172,54 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sharing.GetProviderRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sharing.GetProviderRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sharing.GetProviderRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a provider.`, - Long: `Get a provider. + cmd.Use = "get NAME" + cmd.Short = `Get a provider.` + cmd.Long = `Get a provider. Gets a specific authentication provider. The caller must supply the name of the provider, and must either be a metastore admin or the owner of the - provider.`, + provider.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -181,45 +247,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq sharing.ListProvidersRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
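Taken together, these generated files follow one composition rule: every init() appends a constructor hook to the package-level cmdOverrides slice, and New() replays those hooks, so building the group command also attaches every subcommand. A rough, simplified illustration of what that yields for this package, assuming the repository's usual import path (this program is not part of the change):

package main

import (
	"fmt"

	"github.com/databricks/cli/cmd/workspace/providers"
)

func main() {
	// By the time main runs, the package's init() functions have already
	// registered newCreate, newDelete, newGet, newList, newListShares and
	// newUpdate on cmdOverrides.
	cmd := providers.New() // builds the group and applies every registered hook

	for _, sub := range cmd.Commands() {
		fmt.Println(sub.Name()) // create, delete, get, list, list-shares, update
	}
}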
+var listOverrides []func( + *cobra.Command, + *sharing.ListProvidersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sharing.ListProvidersRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.DataProviderGlobalMetastoreId, "data-provider-global-metastore-id", listReq.DataProviderGlobalMetastoreId, `If not provided, all providers will be returned.`) + cmd.Flags().StringVar(&listReq.DataProviderGlobalMetastoreId, "data-provider-global-metastore-id", listReq.DataProviderGlobalMetastoreId, `If not provided, all providers will be returned.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List providers.`, - Long: `List providers. + cmd.Use = "list" + cmd.Short = `List providers.` + cmd.Long = `List providers. Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There is no guarantee of a specific - ordering of the elements in the array.`, + ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -236,33 +324,54 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start list-shares command -var listSharesReq sharing.ListSharesRequest -func init() { - Cmd.AddCommand(listSharesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listSharesOverrides []func( + *cobra.Command, + *sharing.ListSharesRequest, +) + +func newListShares() *cobra.Command { + cmd := &cobra.Command{} + + var listSharesReq sharing.ListSharesRequest + // TODO: short flags -} - -var listSharesCmd = &cobra.Command{ - Use: "list-shares NAME", - Short: `List shares by Provider.`, - Long: `List shares by Provider. + cmd.Use = "list-shares NAME" + cmd.Short = `List shares by Provider.` + cmd.Long = `List shares by Provider. 
Gets an array of a specified provider's shares within the metastore where: - * the caller is a metastore admin, or * the caller is the owner.`, + * the caller is a metastore admin, or * the caller is the owner.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -290,41 +399,62 @@ var listSharesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listSharesOverrides { + fn(cmd, &listSharesReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListShares()) + }) } // start update command -var updateReq sharing.UpdateProvider -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *sharing.UpdateProvider, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sharing.UpdateProvider + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the provider.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the Provider.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of Provider owner.`) - updateCmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the provider.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the Provider.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of Provider owner.`) + cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a provider.`, - Long: `Update a provider. + cmd.Use = "update NAME" + cmd.Short = `Update a provider.` + cmd.Long = `Update a provider. Updates the information for an authentication provider, if the caller is a metastore admin or is the owner of the provider. 
If the update changes the provider name, the caller must be both a metastore admin and the owner of the - provider.`, + provider.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -359,10 +489,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Providers diff --git a/cmd/workspace/queries/overrides.go b/cmd/workspace/queries/overrides.go index 86f47388e..a06dabdeb 100644 --- a/cmd/workspace/queries/overrides.go +++ b/cmd/workspace/queries/overrides.go @@ -1,11 +1,19 @@ package queries -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *sql.ListQueriesRequest) { // TODO: figure out colored/non-colored headers and colspan shifts listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{header "Author"}} {{range .}}{{.Id|green}} {{.Name|cyan}} {{.User.Email|cyan}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/queries/queries.go b/cmd/workspace/queries/queries.go index 3f7e8ca31..3512adaaf 100755 --- a/cmd/workspace/queries/queries.go +++ b/cmd/workspace/queries/queries.go @@ -12,40 +12,53 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "queries", - Short: `These endpoints are used for CRUD operations on query definitions.`, - Long: `These endpoints are used for CRUD operations on query definitions. Query +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "queries", + Short: `These endpoints are used for CRUD operations on query definitions.`, + Long: `These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the sql_task type of the Jobs API, e.g. :method:jobs/create.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sql.QueryPostContent -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.QueryPostContent, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.QueryPostContent + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.DataSourceId, "data-source-id", createReq.DataSourceId, `The ID of the data source / SQL warehouse where this query will run.`) - createCmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `General description that can convey additional information about this query such as usage notes.`) - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `The name or title of this query to display in list views.`) - // TODO: any: options - createCmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the query.`) - createCmd.Flags().StringVar(&createReq.Query, "query", createReq.Query, `The text of the query.`) - -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new query definition.`, - Long: `Create a new query definition. + cmd.Use = "create" + cmd.Short = `Create a new query definition.` + cmd.Long = `Create a new query definition. Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request. @@ -55,18 +68,20 @@ var createCmd = &cobra.Command{ available SQL warehouses. Or you can copy the data_source_id from an existing query. - **Note**: You cannot add a visualization until you create the query.`, + **Note**: You cannot add a visualization until you create the query.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -76,6 +91,7 @@ var createCmd = &cobra.Command{ return err } } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.Queries.Create(ctx, createReq) @@ -83,33 +99,54 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sql.DeleteQueryRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sql.DeleteQueryRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteQueryRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete QUERY_ID", - Short: `Delete a query.`, - Long: `Delete a query. + cmd.Use = "delete QUERY_ID" + cmd.Short = `Delete a query.` + cmd.Long = `Delete a query. Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is - deleted after 30 days.`, + deleted after 30 days.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -137,32 +174,53 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sql.GetQueryRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sql.GetQueryRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sql.GetQueryRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get QUERY_ID", - Short: `Get a query definition.`, - Long: `Get a query definition. + cmd.Use = "get QUERY_ID" + cmd.Short = `Get a query definition.` + cmd.Long = `Get a query definition. Retrieve a query object definition along with contextual permissions - information about the currently authenticated user.`, + information about the currently authenticated user.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -190,46 +248,68 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq sql.ListQueriesRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *sql.ListQueriesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sql.ListQueriesRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Order, "order", listReq.Order, `Name of query attribute to order by.`) - listCmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`) - listCmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of queries to return per page.`) - listCmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`) + cmd.Flags().StringVar(&listReq.Order, "order", listReq.Order, `Name of query attribute to order by.`) + cmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`) + cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of queries to return per page.`) + cmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get a list of queries.`, - Long: `Get a list of queries. + cmd.Use = "list" + cmd.Short = `Get a list of queries.` + cmd.Long = `Get a list of queries. Gets a list of queries. Optionally, this list can be filtered by a search - term.`, + term.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -246,32 +326,53 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start restore command -var restoreReq sql.RestoreQueryRequest -func init() { - Cmd.AddCommand(restoreCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
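Because each hook receives both the command and a pointer to its request struct, curated code can pre-fill request fields before RunE runs while the generated flags still take precedence at parse time. Purely as an illustration (the default value is invented), a file in this queries package could do:

package queries

import (
	"github.com/databricks/databricks-sdk-go/service/sql"
	"github.com/spf13/cobra"
)

func init() {
	listOverrides = append(listOverrides, func(listCmd *cobra.Command, listReq *sql.ListQueriesRequest) {
		// Hypothetical default page size; --page-size still overrides it at runtime.
		if listReq.PageSize == 0 {
			listReq.PageSize = 25
		}
	})
}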
+var restoreOverrides []func( + *cobra.Command, + *sql.RestoreQueryRequest, +) + +func newRestore() *cobra.Command { + cmd := &cobra.Command{} + + var restoreReq sql.RestoreQueryRequest + // TODO: short flags -} - -var restoreCmd = &cobra.Command{ - Use: "restore QUERY_ID", - Short: `Restore a query.`, - Long: `Restore a query. + cmd.Use = "restore QUERY_ID" + cmd.Short = `Restore a query.` + cmd.Long = `Restore a query. Restore a query that has been moved to the trash. A restored query appears in - list views and searches. You can use restored queries for alerts.`, + list views and searches. You can use restored queries for alerts.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -299,41 +400,62 @@ var restoreCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range restoreOverrides { + fn(cmd, &restoreReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestore()) + }) } // start update command -var updateReq sql.QueryEditContent -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
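The create and update commands in this file take their request body through the shared flags.JsonFlag, registered as --json and later unmarshalled into the request struct; per the flag help above it accepts either an inline JSON document or an @path/to/file.json reference. A standalone sketch of that flow, with the field values invented and the JSON keys inferred from the flag names above:

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/flags"
	"github.com/databricks/databricks-sdk-go/service/sql"
)

func main() {
	var req sql.QueryEditContent
	var body flags.JsonFlag

	// Inline JSON; passing "@payload.json" instead would take the body from a file.
	if err := body.Set(`{"name": "Nightly usage report", "query": "SELECT 1"}`); err != nil {
		panic(err)
	}
	if err := body.Unmarshal(&req); err != nil {
		panic(err)
	}
	fmt.Println(req.Name) // Nightly usage report
}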
+var updateOverrides []func( + *cobra.Command, + *sql.QueryEditContent, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sql.QueryEditContent + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.DataSourceId, "data-source-id", updateReq.DataSourceId, `The ID of the data source / SQL warehouse where this query will run.`) - updateCmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `General description that can convey additional information about this query such as usage notes.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name or title of this query to display in list views.`) + cmd.Flags().StringVar(&updateReq.DataSourceId, "data-source-id", updateReq.DataSourceId, `Data source ID.`) + cmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `General description that conveys additional information about this query such as usage notes.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The title of this query that appears in list views, widget headings, and on the query page.`) // TODO: any: options - updateCmd.Flags().StringVar(&updateReq.Query, "query", updateReq.Query, `The text of the query.`) + cmd.Flags().StringVar(&updateReq.Query, "query", updateReq.Query, `The text of the query to be run.`) -} - -var updateCmd = &cobra.Command{ - Use: "update QUERY_ID", - Short: `Change a query definition.`, - Long: `Change a query definition. + cmd.Use = "update QUERY_ID" + cmd.Short = `Change a query definition.` + cmd.Long = `Change a query definition. Modify this query definition. - **Note**: You cannot undo this operation.`, + **Note**: You cannot undo this operation.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -367,10 +489,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Queries diff --git a/cmd/workspace/query-history/overrides.go b/cmd/workspace/query-history/overrides.go index 7e7020697..e0d79423c 100644 --- a/cmd/workspace/query-history/overrides.go +++ b/cmd/workspace/query-history/overrides.go @@ -1,10 +1,18 @@ package query_history -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *sql.ListQueryHistoryRequest) { // TODO: figure out the right format listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.UserName}} {{cyan "%s" .Status}} {{.QueryText}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/query-history/query-history.go b/cmd/workspace/query-history/query-history.go index 5b1e86d0d..1593d6766 100755 --- a/cmd/workspace/query-history/query-history.go +++ b/cmd/workspace/query-history/query-history.go @@ -10,50 +10,72 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "query-history", - Short: `Access the history of queries through SQL warehouses.`, - Long: `Access the history of queries through SQL warehouses.`, - Annotations: map[string]string{ - "package": "sql", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "query-history", + Short: `Access the history of queries through SQL warehouses.`, + Long: `Access the history of queries through SQL warehouses.`, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start list command -var listReq sql.ListQueryHistoryRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *sql.ListQueryHistoryRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sql.ListQueryHistoryRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: filter_by - listCmd.Flags().BoolVar(&listReq.IncludeMetrics, "include-metrics", listReq.IncludeMetrics, `Whether to include metrics about query.`) - listCmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Limit the number of results returned in one page.`) - listCmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`) + cmd.Flags().BoolVar(&listReq.IncludeMetrics, "include-metrics", listReq.IncludeMetrics, `Whether to include metrics about query.`) + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Limit the number of results returned in one page.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List Queries.`, - Long: `List Queries. + cmd.Use = "list" + cmd.Short = `List Queries.` + cmd.Long = `List Queries. List the history of queries through SQL warehouses. - You can filter by user ID, warehouse ID, status, and time range.`, + You can filter by user ID, warehouse ID, status, and time range.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -70,10 +92,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service QueryHistory diff --git a/cmd/workspace/query-visualizations/query-visualizations.go b/cmd/workspace/query-visualizations/query-visualizations.go new file mode 100755 index 000000000..fae0f9341 --- /dev/null +++ b/cmd/workspace/query-visualizations/query-visualizations.go @@ -0,0 +1,236 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package query_visualizations + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "query-visualizations", + Short: `This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.`, + Long: `This is an evolving API that facilitates the addition and removal of + vizualisations from existing queries within the Databricks Workspace. Data + structures may change over time.`, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.CreateQueryVisualizationRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.CreateQueryVisualizationRequest + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "create" + cmd.Short = `Add visualization to a query.` + cmd.Long = `Add visualization to a query.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createJson.Unmarshal(&createReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.QueryVisualizations.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
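Generated groups for preview services, like this query-visualizations one, are created with Hidden: true. Should the service graduate from preview, a curated file in this directory could surface the group through the same override mechanism; a minimal, purely illustrative sketch:

package query_visualizations

import "github.com/spf13/cobra"

func init() {
	cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
		// Hypothetical: show the group in help output once the API is stable.
		cmd.Hidden = false
	})
}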
+var deleteOverrides []func( + *cobra.Command, + *sql.DeleteQueryVisualizationRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteQueryVisualizationRequest + + // TODO: short flags + + cmd.Use = "delete ID" + cmd.Short = `Remove visualization.` + cmd.Long = `Remove visualization.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.Id = args[0] + + err = w.QueryVisualizations.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *sql.Visualization, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sql.Visualization + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "update ID" + cmd.Short = `Edit existing visualization.` + cmd.Long = `Edit existing visualization.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.QueryVisualizations.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service QueryVisualizations diff --git a/cmd/workspace/recipient-activation/recipient-activation.go b/cmd/workspace/recipient-activation/recipient-activation.go index 33bc54ef2..c73b4b4a9 100755 --- a/cmd/workspace/recipient-activation/recipient-activation.go +++ b/cmd/workspace/recipient-activation/recipient-activation.go @@ -9,38 +9,69 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "recipient-activation", - Short: `Databricks Recipient Activation REST API.`, - Long: `Databricks Recipient Activation REST API`, - Annotations: map[string]string{ - "package": "sharing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "recipient-activation", + Short: `The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of TOKEN.`, + Long: `The Recipient Activation API is only applicable in the open sharing model + where the recipient object has the authentication type of TOKEN. The data + recipient follows the activation link shared by the data provider to download + the credential file that includes the access token. The recipient will then + use the credential file to establish a secure connection with the provider to + receive the shared data. + + Note that you can download the credential file only once. Recipients should + treat the downloaded credential as a secret and must not share it outside of + their organization.`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get-activation-url-info command -var getActivationUrlInfoReq sharing.GetActivationUrlInfoRequest -func init() { - Cmd.AddCommand(getActivationUrlInfoCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getActivationUrlInfoOverrides []func( + *cobra.Command, + *sharing.GetActivationUrlInfoRequest, +) + +func newGetActivationUrlInfo() *cobra.Command { + cmd := &cobra.Command{} + + var getActivationUrlInfoReq sharing.GetActivationUrlInfoRequest + // TODO: short flags -} - -var getActivationUrlInfoCmd = &cobra.Command{ - Use: "get-activation-url-info ACTIVATION_URL", - Short: `Get a share activation URL.`, - Long: `Get a share activation URL. + cmd.Use = "get-activation-url-info ACTIVATION_URL" + cmd.Short = `Get a share activation URL.` + cmd.Long = `Get a share activation URL. 
- Gets an activation URL for a share.`, + Gets an activation URL for a share.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -51,36 +82,58 @@ var getActivationUrlInfoCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getActivationUrlInfoOverrides { + fn(cmd, &getActivationUrlInfoReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetActivationUrlInfo()) + }) } // start retrieve-token command -var retrieveTokenReq sharing.RetrieveTokenRequest -func init() { - Cmd.AddCommand(retrieveTokenCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var retrieveTokenOverrides []func( + *cobra.Command, + *sharing.RetrieveTokenRequest, +) + +func newRetrieveToken() *cobra.Command { + cmd := &cobra.Command{} + + var retrieveTokenReq sharing.RetrieveTokenRequest + // TODO: short flags -} - -var retrieveTokenCmd = &cobra.Command{ - Use: "retrieve-token ACTIVATION_URL", - Short: `Get an access token.`, - Long: `Get an access token. + cmd.Use = "retrieve-token ACTIVATION_URL" + cmd.Short = `Get an access token.` + cmd.Long = `Get an access token. Retrieve access token with an activation url. This is a public API without any - authentication.`, + authentication.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -91,10 +144,24 @@ var retrieveTokenCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
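(Aside: the package-level cmdOverrides slice applied by New() above works the same way for the whole command group. A minimal hypothetical sketch; the package name and the particular tweak are assumptions for illustration only.)

// Hypothetical hand-written file in the same package as the generated commands.
package recipient_activation

import "github.com/spf13/cobra"

func init() {
	cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
		// Example tweak: give the whole command group an extra alias.
		cmd.Aliases = append(cmd.Aliases, "recipient-activation-api")
	})
}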
+ for _, fn := range retrieveTokenOverrides { + fn(cmd, &retrieveTokenReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRetrieveToken()) + }) } // end service RecipientActivation diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go index bb8f9b17f..ceed57848 100755 --- a/cmd/workspace/recipients/recipients.go +++ b/cmd/workspace/recipients/recipients.go @@ -12,52 +12,90 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "recipients", - Short: `Databricks Recipients REST API.`, - Long: `Databricks Recipients REST API`, - Annotations: map[string]string{ - "package": "sharing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "recipients", + Short: `A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares.`, + Long: `A recipient is an object you create using :method:recipients/create to + represent an organization which you want to allow access shares. The way how + sharing works differs depending on whether or not your recipient has access to + a Databricks workspace that is enabled for Unity Catalog: + + - For recipients with access to a Databricks workspace that is enabled for + Unity Catalog, you can create a recipient object along with a unique sharing + identifier you get from the recipient. The sharing identifier is the key + identifier that enables the secure connection. This sharing mode is called + **Databricks-to-Databricks sharing**. + + - For recipients without access to a Databricks workspace that is enabled for + Unity Catalog, when you create a recipient object, Databricks generates an + activation link you can send to the recipient. The recipient follows the + activation link to download the credential file, and then uses the credential + file to establish a secure connection to receive the shared data. This sharing + mode is called **open sharing**.`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sharing.CreateRecipient -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *sharing.CreateRecipient, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sharing.CreateRecipient + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the recipient.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the recipient.`) // TODO: any: data_recipient_global_metastore_id // TODO: complex arg: ip_access_list - createCmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of the recipient owner.`) + cmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of the recipient owner.`) // TODO: complex arg: properties_kvpairs - createCmd.Flags().StringVar(&createReq.SharingCode, "sharing-code", createReq.SharingCode, `The one-time sharing code provided by the data recipient.`) + cmd.Flags().StringVar(&createReq.SharingCode, "sharing-code", createReq.SharingCode, `The one-time sharing code provided by the data recipient.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME AUTHENTICATION_TYPE", - Short: `Create a share recipient.`, - Long: `Create a share recipient. + cmd.Use = "create NAME AUTHENTICATION_TYPE" + cmd.Short = `Create a share recipient.` + cmd.Long = `Create a share recipient. Creates a new recipient with the delta sharing authentication type in the metastore. The caller must be a metastore admin or has the - **CREATE_RECIPIENT** privilege on the metastore.`, + **CREATE_RECIPIENT** privilege on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -79,32 +117,53 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sharing.DeleteRecipientRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *sharing.DeleteRecipientRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sharing.DeleteRecipientRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a share recipient.`, - Long: `Delete a share recipient. + cmd.Use = "delete NAME" + cmd.Short = `Delete a share recipient.` + cmd.Long = `Delete a share recipient. Deletes the specified recipient from the metastore. The caller must be the - owner of the recipient.`, + owner of the recipient.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -132,33 +191,54 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sharing.GetRecipientRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sharing.GetRecipientRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sharing.GetRecipientRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a share recipient.`, - Long: `Get a share recipient. + cmd.Use = "get NAME" + cmd.Short = `Get a share recipient.` + cmd.Long = `Get a share recipient. Gets a share recipient from the metastore if: - * the caller is the owner of the share recipient, or: * is a metastore admin`, + * the caller is the owner of the share recipient, or: * is a metastore admin` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -186,45 +266,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq sharing.ListRecipientsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *sharing.ListRecipientsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sharing.ListRecipientsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.DataRecipientGlobalMetastoreId, "data-recipient-global-metastore-id", listReq.DataRecipientGlobalMetastoreId, `If not provided, all recipients will be returned.`) + cmd.Flags().StringVar(&listReq.DataRecipientGlobalMetastoreId, "data-recipient-global-metastore-id", listReq.DataRecipientGlobalMetastoreId, `If not provided, all recipients will be returned.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List share recipients.`, - Long: `List share recipients. + cmd.Use = "list" + cmd.Short = `List share recipients.` + cmd.Long = `List share recipients. Gets an array of all share recipients within the current metastore where: * the caller is a metastore admin, or * the caller is the owner. There is no - guarantee of a specific ordering of the elements in the array.`, + guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -241,36 +343,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start rotate-token command -var rotateTokenReq sharing.RotateRecipientToken -func init() { - Cmd.AddCommand(rotateTokenCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var rotateTokenOverrides []func( + *cobra.Command, + *sharing.RotateRecipientToken, +) + +func newRotateToken() *cobra.Command { + cmd := &cobra.Command{} + + var rotateTokenReq sharing.RotateRecipientToken + // TODO: short flags -} - -var rotateTokenCmd = &cobra.Command{ - Use: "rotate-token EXISTING_TOKEN_EXPIRE_IN_SECONDS NAME", - Short: `Rotate a token.`, - Long: `Rotate a token. + cmd.Use = "rotate-token EXISTING_TOKEN_EXPIRE_IN_SECONDS NAME" + cmd.Short = `Rotate a token.` + cmd.Long = `Rotate a token. Refreshes the specified recipient's delta sharing authentication token with - the provided token info. 
The caller must be the owner of the recipient.`, + the provided token info. The caller must be the owner of the recipient.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -285,32 +409,53 @@ var rotateTokenCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range rotateTokenOverrides { + fn(cmd, &rotateTokenReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRotateToken()) + }) } // start share-permissions command -var sharePermissionsReq sharing.SharePermissionsRequest -func init() { - Cmd.AddCommand(sharePermissionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var sharePermissionsOverrides []func( + *cobra.Command, + *sharing.SharePermissionsRequest, +) + +func newSharePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var sharePermissionsReq sharing.SharePermissionsRequest + // TODO: short flags -} - -var sharePermissionsCmd = &cobra.Command{ - Use: "share-permissions NAME", - Short: `Get recipient share permissions.`, - Long: `Get recipient share permissions. + cmd.Use = "share-permissions NAME" + cmd.Short = `Get recipient share permissions.` + cmd.Long = `Get recipient share permissions. Gets the share permissions for the specified Recipient. The caller must be a - metastore admin or the owner of the Recipient.`, + metastore admin or the owner of the Recipient.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -338,41 +483,62 @@ var sharePermissionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range sharePermissionsOverrides { + fn(cmd, &sharePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSharePermissions()) + }) } // start update command -var updateReq sharing.UpdateRecipient -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *sharing.UpdateRecipient, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sharing.UpdateRecipient + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the recipient.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the recipient.`) // TODO: complex arg: ip_access_list - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of Recipient.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of the recipient owner.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of Recipient.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of the recipient owner.`) // TODO: complex arg: properties_kvpairs -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a share recipient.`, - Long: `Update a share recipient. + cmd.Use = "update NAME" + cmd.Short = `Update a share recipient.` + cmd.Long = `Update a share recipient. Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of the recipient. If the recipient name will be updated, - the user must be both a metastore admin and the owner of the recipient.`, + the user must be both a metastore admin and the owner of the recipient.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -407,10 +573,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Recipients diff --git a/cmd/workspace/registered-models/registered-models.go b/cmd/workspace/registered-models/registered-models.go new file mode 100755 index 000000000..cc7822531 --- /dev/null +++ b/cmd/workspace/registered-models/registered-models.go @@ -0,0 +1,635 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package registered_models + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "registered-models", + Short: `Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.`, + Long: `Databricks provides a hosted version of MLflow Model Registry in Unity + Catalog. Models in Unity Catalog provide centralized access control, auditing, + lineage, and discovery of ML models across Databricks workspaces. + + An MLflow registered model resides in the third layer of Unity Catalog’s + three-level namespace. Registered models contain model versions, which + correspond to actual ML models (MLflow models). Creating new model versions + currently requires use of the MLflow Python client. Once model versions are + created, you can load them for batch inference using MLflow Python client + APIs, or deploy them for real-time serving using Databricks Model Serving. + + All operations on registered models and model versions require USE_CATALOG + permissions on the enclosing catalog and USE_SCHEMA permissions on the + enclosing schema. In addition, the following additional privileges are + required for various operations: + + * To create a registered model, users must additionally have the CREATE_MODEL + permission on the target schema. * To view registered model or model version + metadata, model version data files, or invoke a model version, users must + additionally have the EXECUTE permission on the registered model * To update + registered model or model version tags, users must additionally have APPLY TAG + permissions on the registered model * To update other registered model or + model version metadata (comments, aliases) create a new model version, or + update permissions on the registered model, users must be owners of the + registered model. + + Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. + tagging, grants) that specify a securable type, use "FUNCTION" as the + securable type.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateRegisteredModelRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateRegisteredModelRequest + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `The comment attached to the registered model.`) + cmd.Flags().StringVar(&createReq.StorageLocation, "storage-location", createReq.StorageLocation, `The storage location on the cloud under which model version data files are stored.`) + + cmd.Use = "create CATALOG_NAME SCHEMA_NAME NAME" + cmd.Short = `Create a Registered Model.` + cmd.Long = `Create a Registered Model. + + Creates a new registered model in Unity Catalog. + + File storage for model versions in the registered model will be located in the + default location which is specified by the parent schema, or the parent + catalog, or the Metastore. 
+ + For registered model creation to succeed, the user must satisfy the following + conditions: - The caller must be a metastore admin, or be the owner of the + parent catalog and schema, or have the **USE_CATALOG** privilege on the parent + catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller + must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent + schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(3) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createJson.Unmarshal(&createReq) + if err != nil { + return err + } + } else { + createReq.CatalogName = args[0] + createReq.SchemaName = args[1] + createReq.Name = args[2] + } + + response, err := w.RegisteredModels.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteRegisteredModelRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteRegisteredModelRequest + + // TODO: short flags + + cmd.Use = "delete FULL_NAME" + cmd.Short = `Delete a Registered Model.` + cmd.Long = `Delete a Registered Model. + + Deletes a registered model and all its model versions from the specified + parent catalog and schema. + + The caller must be a metastore admin or an owner of the registered model. For + the latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Registered Models drop-down." + names, err := w.RegisteredModels.RegisteredModelInfoNameToFullNameMap(ctx, catalog.ListRegisteredModelsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Registered Models drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the registered model") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the registered model") + } + deleteReq.FullName = args[0] + + err = w.RegisteredModels.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start delete-alias command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteAliasOverrides []func( + *cobra.Command, + *catalog.DeleteAliasRequest, +) + +func newDeleteAlias() *cobra.Command { + cmd := &cobra.Command{} + + var deleteAliasReq catalog.DeleteAliasRequest + + // TODO: short flags + + cmd.Use = "delete-alias FULL_NAME ALIAS" + cmd.Short = `Delete a Registered Model Alias.` + cmd.Long = `Delete a Registered Model Alias. + + Deletes a registered model alias. + + The caller must be a metastore admin or an owner of the registered model. For + the latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteAliasReq.FullName = args[0] + deleteAliasReq.Alias = args[1] + + err = w.RegisteredModels.DeleteAlias(ctx, deleteAliasReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteAliasOverrides { + fn(cmd, &deleteAliasReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteAlias()) + }) +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetRegisteredModelRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetRegisteredModelRequest + + // TODO: short flags + + cmd.Use = "get FULL_NAME" + cmd.Short = `Get a Registered Model.` + cmd.Long = `Get a Registered Model. + + Get a registered model. + + The caller must be a metastore admin or an owner of (or have the **EXECUTE** + privilege on) the registered model. 
For the latter case, the caller must also + be the owner or have the **USE_CATALOG** privilege on the parent catalog and + the **USE_SCHEMA** privilege on the parent schema.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Registered Models drop-down." + names, err := w.RegisteredModels.RegisteredModelInfoNameToFullNameMap(ctx, catalog.ListRegisteredModelsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Registered Models drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the registered model") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the registered model") + } + getReq.FullName = args[0] + + response, err := w.RegisteredModels.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListRegisteredModelsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListRegisteredModelsRequest + var listJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&listReq.CatalogName, "catalog-name", listReq.CatalogName, `The identifier of the catalog under which to list registered models.`) + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Max number of registered models to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque token to send for the next page of results (pagination).`) + cmd.Flags().StringVar(&listReq.SchemaName, "schema-name", listReq.SchemaName, `The identifier of the schema under which to list registered models.`) + + cmd.Use = "list" + cmd.Short = `List Registered Models.` + cmd.Long = `List Registered Models. + + List registered models. You can list registered models under a particular + schema, or list all registered models in the current metastore. + + The returned models are filtered based on the privileges of the calling user. + For example, the metastore admin is able to list all the registered models. A + regular user needs to be the owner or have the **EXECUTE** privilege on the + registered model to recieve the registered models in the response. 
For the + latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema. + + There is no guarantee of a specific ordering of the elements in the response.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = listJson.Unmarshal(&listReq) + if err != nil { + return err + } + } else { + } + + response, err := w.RegisteredModels.ListAll(ctx, listReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start set-alias command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setAliasOverrides []func( + *cobra.Command, + *catalog.SetRegisteredModelAliasRequest, +) + +func newSetAlias() *cobra.Command { + cmd := &cobra.Command{} + + var setAliasReq catalog.SetRegisteredModelAliasRequest + var setAliasJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setAliasJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "set-alias FULL_NAME ALIAS VERSION_NUM" + cmd.Short = `Set a Registered Model Alias.` + cmd.Long = `Set a Registered Model Alias. + + Set an alias on the specified registered model. + + The caller must be a metastore admin or an owner of the registered model. For + the latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(3) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setAliasJson.Unmarshal(&setAliasReq) + if err != nil { + return err + } + } else { + setAliasReq.FullName = args[0] + setAliasReq.Alias = args[1] + _, err = fmt.Sscan(args[2], &setAliasReq.VersionNum) + if err != nil { + return fmt.Errorf("invalid VERSION_NUM: %s", args[2]) + } + } + + response, err := w.RegisteredModels.SetAlias(ctx, setAliasReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
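(Aside: a common use of these hooks, mirroring cmd/workspace/repos/overrides.go further down in this diff, is registering an output template for list commands. A hypothetical sketch for this package; the template field names are assumptions.)

// Hypothetical hand-written file in package registered_models.
package registered_models

import (
	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/databricks-sdk-go/service/catalog"
	"github.com/spf13/cobra"
)

// listOverride renders `registered-models list` output as a compact table
// instead of raw JSON. FullName and Owner are assumed field names.
func listOverride(listCmd *cobra.Command, listReq *catalog.ListRegisteredModelsRequest) {
	listCmd.Annotations["template"] = cmdio.Heredoc(`
	{{range .}}{{.FullName|green}}	{{.Owner}}
	{{end}}`)
}

func init() {
	listOverrides = append(listOverrides, listOverride)
}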
+ for _, fn := range setAliasOverrides { + fn(cmd, &setAliasReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetAlias()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateRegisteredModelRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateRegisteredModelRequest + + // TODO: short flags + + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `The comment attached to the registered model.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the registered model.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The identifier of the user who owns the registered model.`) + + cmd.Use = "update FULL_NAME" + cmd.Short = `Update a Registered Model.` + cmd.Long = `Update a Registered Model. + + Updates the specified registered model. + + The caller must be a metastore admin or an owner of the registered model. For + the latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema. + + Currently only the name, the owner or the comment of the registered model can + be updated.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Registered Models drop-down." + names, err := w.RegisteredModels.RegisteredModelInfoNameToFullNameMap(ctx, catalog.ListRegisteredModelsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Registered Models drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the registered model") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the registered model") + } + updateReq.FullName = args[0] + + response, err := w.RegisteredModels.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service RegisteredModels diff --git a/cmd/workspace/repos/overrides.go b/cmd/workspace/repos/overrides.go index 127a794a5..f6f26f81d 100644 --- a/cmd/workspace/repos/overrides.go +++ b/cmd/workspace/repos/overrides.go @@ -7,16 +7,19 @@ import ( "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/spf13/cobra" ) -func init() { +func listOverride(listCmd *cobra.Command, listReq *workspace.ListReposRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{green "%d" .Id}} {{.Path}} {{.Branch|blue}} {{.Url|cyan}} {{end}}`) +} +func createOverride(createCmd *cobra.Command, createReq *workspace.CreateRepo) { createCmd.Use = "create URL [PROVIDER]" createCmd.Args = func(cmd *cobra.Command, args []string) error { // If the provider argument is not specified, we try to detect it from the URL. @@ -26,11 +29,13 @@ func init() { } return check(cmd, args) } + + createJson := createCmd.Flag("json").Value.(*flags.JsonFlag) createCmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) if cmd.Flags().Changed("json") { - err = createJson.Unmarshal(&createReq) + err = createJson.Unmarshal(createReq) if err != nil { return err } @@ -46,13 +51,15 @@ func init() { } } } - response, err := w.Repos.Create(ctx, createReq) + response, err := w.Repos.Create(ctx, *createReq) if err != nil { return err } return cmdio.Render(ctx, response) } +} +func deleteOverride(deleteCmd *cobra.Command, deleteReq *workspace.DeleteRepoRequest) { deleteCmd.Use = "delete REPO_ID_OR_PATH" deleteCmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -62,13 +69,15 @@ func init() { if err != nil { return err } - err = w.Repos.Delete(ctx, deleteReq) + err = w.Repos.Delete(ctx, *deleteReq) if err != nil { return err } return nil } +} +func getOverride(getCmd *cobra.Command, getReq *workspace.GetRepoRequest) { getCmd.Use = "get REPO_ID_OR_PATH" getCmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -78,14 +87,18 @@ func init() { return err } - response, err := w.Repos.Get(ctx, getReq) + response, err := w.Repos.Get(ctx, *getReq) if err != nil { return err } return cmdio.Render(ctx, response) } +} +func updateOverride(updateCmd *cobra.Command, updateReq *workspace.UpdateRepo) { updateCmd.Use = "update REPO_ID_OR_PATH" + + updateJson := updateCmd.Flag("json").Value.(*flags.JsonFlag) updateCmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -101,7 +114,7 @@ func init() { } } - err = w.Repos.Update(ctx, updateReq) + err = w.Repos.Update(ctx, *updateReq) if err != nil { return err } @@ -147,3 +160,11 @@ func repoArgumentToRepoID(ctx context.Context, w *databricks.WorkspaceClient, ar } return oi.ObjectId, nil } + +func init() { + listOverrides = append(listOverrides, listOverride) + createOverrides = append(createOverrides, createOverride) + deleteOverrides = append(deleteOverrides, deleteOverride) + getOverrides = append(getOverrides, getOverride) + updateOverrides = append(updateOverrides, updateOverride) +} diff --git a/cmd/workspace/repos/repos.go 
b/cmd/workspace/repos/repos.go index fdd9556d4..2d510e904 100755 --- a/cmd/workspace/repos/repos.go +++ b/cmd/workspace/repos/repos.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "repos", - Short: `The Repos API allows users to manage their git repos.`, - Long: `The Repos API allows users to manage their git repos. Users can use the API to +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "repos", + Short: `The Repos API allows users to manage their git repos.`, + Long: `The Repos API allows users to manage their git repos. Users can use the API to access all repos that they have manage permissions on. Databricks Repos is a visual Git client in Databricks. It supports common Git @@ -25,44 +30,61 @@ var Cmd = &cobra.Command{ Within Repos you can develop code in notebooks or other files and follow data science and engineering code development best practices using Git for version control, collaboration, and CI/CD.`, - Annotations: map[string]string{ - "package": "workspace", - }, + GroupID: "workspace", + Annotations: map[string]string{ + "package": "workspace", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq workspace.CreateRepo -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *workspace.CreateRepo, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq workspace.CreateRepo + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Path, "path", createReq.Path, `Desired path for the repo in the workspace.`) + cmd.Flags().StringVar(&createReq.Path, "path", createReq.Path, `Desired path for the repo in the workspace.`) // TODO: complex arg: sparse_checkout -} - -var createCmd = &cobra.Command{ - Use: "create URL PROVIDER", - Short: `Create a repo.`, - Long: `Create a repo. + cmd.Use = "create URL PROVIDER" + cmd.Short = `Create a repo.` + cmd.Long = `Create a repo. Creates a repo in the workspace and links it to the remote Git repo specified. 
Note that repos created programmatically must be linked to a remote Git repo, - unlike repos created in the browser.`, + unlike repos created in the browser.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -81,31 +103,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq workspace.DeleteRepoRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *workspace.DeleteRepoRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq workspace.DeleteRepoRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete REPO_ID", - Short: `Delete a repo.`, - Long: `Delete a repo. + cmd.Use = "delete REPO_ID" + cmd.Short = `Delete a repo.` + cmd.Long = `Delete a repo. - Deletes the specified repo.`, + Deletes the specified repo.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -136,31 +179,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq workspace.GetRepoRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *workspace.GetRepoRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq workspace.GetRepoRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get REPO_ID", - Short: `Get a repo.`, - Long: `Get a repo. 
+ cmd.Use = "get REPO_ID" + cmd.Short = `Get a repo.` + cmd.Long = `Get a repo. - Returns the repo with the given repo ID.`, + Returns the repo with the given repo ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -191,44 +255,213 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *workspace.GetRepoPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq workspace.GetRepoPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels REPO_ID" + cmd.Short = `Get repo permission levels.` + cmd.Long = `Get repo permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The repo for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the repo for which to get or manage permissions") + } + getPermissionLevelsReq.RepoId = args[0] + + response, err := w.Repos.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionsOverrides []func( + *cobra.Command, + *workspace.GetRepoPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq workspace.GetRepoPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions REPO_ID" + cmd.Short = `Get repo permissions.` + cmd.Long = `Get repo permissions. + + Gets the permissions of a repo. Repos can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The repo for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the repo for which to get or manage permissions") + } + getPermissionsReq.RepoId = args[0] + + response, err := w.Repos.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start list command -var listReq workspace.ListReposRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *workspace.ListReposRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq workspace.ListReposRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.NextPageToken, "next-page-token", listReq.NextPageToken, `Token used to get the next page of results.`) - listCmd.Flags().StringVar(&listReq.PathPrefix, "path-prefix", listReq.PathPrefix, `Filters repos that have paths starting with the given path prefix.`) + cmd.Flags().StringVar(&listReq.NextPageToken, "next-page-token", listReq.NextPageToken, `Token used to get the next page of results.`) + cmd.Flags().StringVar(&listReq.PathPrefix, "path-prefix", listReq.PathPrefix, `Filters repos that have paths starting with the given path prefix.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get repos.`, - Long: `Get repos. + cmd.Use = "list" + cmd.Short = `Get repos.` + cmd.Long = `Get repos. Returns repos that the calling user has Manage permissions on. 
Results are - paginated with each page containing twenty repos.`, + paginated with each page containing twenty repos.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -245,38 +478,143 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *workspace.RepoPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq workspace.RepoPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions REPO_ID" + cmd.Short = `Set repo permissions.` + cmd.Long = `Set repo permissions. + + Sets permissions on a repo. Repos can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The repo for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the repo for which to get or manage permissions") + } + setPermissionsReq.RepoId = args[0] + + response, err := w.Repos.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start update command -var updateReq workspace.UpdateRepo -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *workspace.UpdateRepo, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq workspace.UpdateRepo + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Branch, "branch", updateReq.Branch, `Branch that the local version of the repo is checked out to.`) + cmd.Flags().StringVar(&updateReq.Branch, "branch", updateReq.Branch, `Branch that the local version of the repo is checked out to.`) // TODO: complex arg: sparse_checkout - updateCmd.Flags().StringVar(&updateReq.Tag, "tag", updateReq.Tag, `Tag that the local version of the repo is checked out to.`) + cmd.Flags().StringVar(&updateReq.Tag, "tag", updateReq.Tag, `Tag that the local version of the repo is checked out to.`) -} - -var updateCmd = &cobra.Command{ - Use: "update REPO_ID", - Short: `Update a repo.`, - Long: `Update a repo. + cmd.Use = "update REPO_ID" + cmd.Short = `Update a repo.` + cmd.Long = `Update a repo. Updates the repo to a different branch or tag, or updates the repo to the - latest commit on the same branch.`, + latest commit on the same branch.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -313,10 +651,108 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updatePermissionsOverrides []func( + *cobra.Command, + *workspace.RepoPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq workspace.RepoPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions REPO_ID" + cmd.Short = `Update repo permissions.` + cmd.Long = `Update repo permissions. + + Updates the permissions on a repo. Repos can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The repo for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the repo for which to get or manage permissions") + } + updatePermissionsReq.RepoId = args[0] + + response, err := w.Repos.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service Repos diff --git a/cmd/workspace/schemas/overrides.go b/cmd/workspace/schemas/overrides.go index 4ff8bf124..180690b6e 100644 --- a/cmd/workspace/schemas/overrides.go +++ b/cmd/workspace/schemas/overrides.go @@ -1,10 +1,18 @@ package schemas -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *catalog.ListSchemasRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Full Name"}} {{header "Owner"}} {{header "Comment"}} {{range .}}{{.FullName|green}} {{.Owner|cyan}} {{.Comment}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/schemas/schemas.go b/cmd/workspace/schemas/schemas.go index 4a6eb33b9..fddf986de 100755 --- a/cmd/workspace/schemas/schemas.go +++ b/cmd/workspace/schemas/schemas.go @@ -12,53 +12,75 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "schemas", - Short: `A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.`, - Long: `A schema (also called a database) is the second layer of Unity Catalog’s +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "schemas", + Short: `A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.`, + Long: `A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema organizes tables, views and functions. To access (or list) a table or view in a schema, users must have the USE_SCHEMA data permission on the schema and its parent catalog, and they must have the SELECT permission on the table or view.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateSchema -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.CreateSchema, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateSchema + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) // TODO: map via StringToStringVar: properties - createCmd.Flags().StringVar(&createReq.StorageRoot, "storage-root", createReq.StorageRoot, `Storage root URL for managed tables within schema.`) + cmd.Flags().StringVar(&createReq.StorageRoot, "storage-root", createReq.StorageRoot, `Storage root URL for managed tables within schema.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME CATALOG_NAME", - Short: `Create a schema.`, - Long: `Create a schema. + cmd.Use = "create NAME CATALOG_NAME" + cmd.Short = `Create a schema.` + cmd.Long = `Create a schema. Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent - catalog.`, + catalog.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -77,32 +99,53 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteSchemaRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteSchemaRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteSchemaRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete FULL_NAME", - Short: `Delete a schema.`, - Long: `Delete a schema. + cmd.Use = "delete FULL_NAME" + cmd.Short = `Delete a schema.` + cmd.Long = `Delete a schema. Deletes the specified schema from the parent catalog. 
The caller must be the - owner of the schema or an owner of the parent catalog.`, + owner of the schema or an owner of the parent catalog.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -130,33 +173,54 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetSchemaRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetSchemaRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetSchemaRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get FULL_NAME", - Short: `Get a schema.`, - Long: `Get a schema. + cmd.Use = "get FULL_NAME" + cmd.Short = `Get a schema.` + cmd.Long = `Get a schema. Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the schema, or a user that has the **USE_SCHEMA** - privilege on the schema.`, + privilege on the schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -184,39 +248,61 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListSchemasRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListSchemasRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListSchemasRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list CATALOG_NAME", - Short: `List schemas.`, - Long: `List schemas. + cmd.Use = "list CATALOG_NAME" + cmd.Short = `List schemas.` + cmd.Long = `List schemas. Gets an array of schemas for a catalog in the metastore. 
If the caller is the metastore admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved. There is - no guarantee of a specific ordering of the elements in the array.`, + no guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -227,42 +313,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateSchema -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateSchema, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateSchema + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of schema, relative to parent catalog.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of schema.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of schema, relative to parent catalog.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of schema.`) // TODO: map via StringToStringVar: properties -} - -var updateCmd = &cobra.Command{ - Use: "update FULL_NAME", - Short: `Update a schema.`, - Long: `Update a schema. + cmd.Use = "update FULL_NAME" + cmd.Short = `Update a schema.` + cmd.Long = `Update a schema. Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If the caller is a metastore admin, only the __owner__ field can be changed in the update. 
If the __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA** privilege on - the parent catalog.`, + the parent catalog.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -296,10 +403,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Schemas diff --git a/cmd/workspace/secrets/overrides.go b/cmd/workspace/secrets/overrides.go index 5443aca28..40c7babab 100644 --- a/cmd/workspace/secrets/overrides.go +++ b/cmd/workspace/secrets/overrides.go @@ -1,121 +1,22 @@ package secrets import ( - "encoding/base64" - "fmt" - "io" - "os" - - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" - "github.com/databricks/cli/libs/flags" - "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/spf13/cobra" ) -func init() { +func cmdOverride(cmd *cobra.Command) { + cmd.AddCommand(newPutSecret()) +} + +func listScopesOverride(listScopesCmd *cobra.Command) { listScopesCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Scope"}} {{header "Backend Type"}} {{range .}}{{.Name|green}} {{.BackendType}} {{end}}`) - - Cmd.AddCommand(putSecretCmd) - // TODO: short flags - putSecretCmd.Flags().Var(&putSecretJson, "json", `either inline JSON string or @path/to/file.json with request body`) - - putSecretCmd.Flags().StringVar(&putSecretReq.BytesValue, "bytes-value", putSecretReq.BytesValue, `If specified, value will be stored as bytes.`) - putSecretCmd.Flags().StringVar(&putSecretReq.StringValue, "string-value", putSecretReq.StringValue, `If specified, note that the value will be stored in UTF-8 (MB4) form.`) } -var putSecretReq workspace.PutSecret -var putSecretJson flags.JsonFlag - -var putSecretCmd = &cobra.Command{ - Use: "put-secret SCOPE KEY", - Short: `Add a secret.`, - Long: `Add a secret. - - Inserts a secret under the provided scope with the given name. If a secret - already exists with the same name, this command overwrites the existing - secret's value. The server encrypts the secret using the secret scope's - encryption settings before storing it. - - You must have WRITE or MANAGE permission on the secret scope. The secret - key must consist of alphanumeric characters, dashes, underscores, and periods, - and cannot exceed 128 characters. The maximum allowed secret value size is 128 - KB. The maximum number of secrets in a given scope is 1000. - - The arguments "string-value" or "bytes-value" specify the type of the secret, - which will determine the value returned when the secret value is requested. - - You can specify the secret value in one of three ways: - * Specify the value as a string using the --string-value flag. - * Input the secret when prompted interactively (single-line secrets). 
- * Pass the secret via standard input (multi-line secrets). - `, - - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(2) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { - ctx := cmd.Context() - w := root.WorkspaceClient(ctx) - - bytesValueChanged := cmd.Flags().Changed("bytes-value") - stringValueChanged := cmd.Flags().Changed("string-value") - if bytesValueChanged && stringValueChanged { - return fmt.Errorf("cannot specify both --bytes-value and --string-value") - } - - if cmd.Flags().Changed("json") { - err = putSecretJson.Unmarshal(&putSecretReq) - if err != nil { - return err - } - } else { - putSecretReq.Scope = args[0] - putSecretReq.Key = args[1] - - switch { - case bytesValueChanged: - // Bytes value set; encode as base64. - putSecretReq.BytesValue = base64.StdEncoding.EncodeToString([]byte(putSecretReq.BytesValue)) - case stringValueChanged: - // String value set; nothing to do. - default: - // Neither is specified; read secret value from stdin. - bytes, err := promptSecret(cmd) - if err != nil { - return err - } - putSecretReq.BytesValue = base64.StdEncoding.EncodeToString(bytes) - } - } - - err = w.Secrets.PutSecret(ctx, putSecretReq) - if err != nil { - return err - } - return nil - }, -} - -func promptSecret(cmd *cobra.Command) ([]byte, error) { - // If stdin is a TTY, prompt for the secret. - if !cmdio.IsInTTY(cmd.Context()) { - return io.ReadAll(os.Stdin) - } - - value, err := cmdio.Secret(cmd.Context(), "Please enter your secret value") - if err != nil { - return nil, err - } - - return []byte(value), nil +func init() { + cmdOverrides = append(cmdOverrides, cmdOverride) + listScopesOverrides = append(listScopesOverrides, listScopesOverride) } diff --git a/cmd/workspace/secrets/put_secret.go b/cmd/workspace/secrets/put_secret.go new file mode 100644 index 000000000..2fbf49c5c --- /dev/null +++ b/cmd/workspace/secrets/put_secret.go @@ -0,0 +1,122 @@ +package secrets + +import ( + "encoding/base64" + "fmt" + "io" + "os" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/spf13/cobra" +) + +func newPutSecret() *cobra.Command { + cmd := &cobra.Command{} + + var putSecretReq workspace.PutSecret + var putSecretJson flags.JsonFlag + + cmd.Flags().Var(&putSecretJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&putSecretReq.BytesValue, "bytes-value", putSecretReq.BytesValue, `If specified, value will be stored as bytes.`) + cmd.Flags().StringVar(&putSecretReq.StringValue, "string-value", putSecretReq.StringValue, `If specified, note that the value will be stored in UTF-8 (MB4) form.`) + + cmd.Use = "put-secret SCOPE KEY" + cmd.Short = `Add a secret.` + cmd.Long = `Add a secret. + + Inserts a secret under the provided scope with the given name. If a secret + already exists with the same name, this command overwrites the existing + secret's value. The server encrypts the secret using the secret scope's + encryption settings before storing it. + + You must have WRITE or MANAGE permission on the secret scope. The secret + key must consist of alphanumeric characters, dashes, underscores, and periods, + and cannot exceed 128 characters. 
The maximum allowed secret value size is 128 + KB. The maximum number of secrets in a given scope is 1000. + + The arguments "string-value" or "bytes-value" specify the type of the secret, + which will determine the value returned when the secret value is requested. + + You can specify the secret value in one of three ways: + * Specify the value as a string using the --string-value flag. + * Input the secret when prompted interactively (single-line secrets). + * Pass the secret via standard input (multi-line secrets). + ` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + bytesValueChanged := cmd.Flags().Changed("bytes-value") + stringValueChanged := cmd.Flags().Changed("string-value") + if bytesValueChanged && stringValueChanged { + return fmt.Errorf("cannot specify both --bytes-value and --string-value") + } + + if cmd.Flags().Changed("json") { + err = putSecretJson.Unmarshal(&putSecretReq) + if err != nil { + return err + } + } else { + putSecretReq.Scope = args[0] + putSecretReq.Key = args[1] + + switch { + case bytesValueChanged: + // Bytes value set; encode as base64. + putSecretReq.BytesValue = base64.StdEncoding.EncodeToString([]byte(putSecretReq.BytesValue)) + case stringValueChanged: + // String value set; nothing to do. + default: + // Neither is specified; read secret value from stdin. + bytes, err := promptSecret(cmd) + if err != nil { + return err + } + putSecretReq.BytesValue = base64.StdEncoding.EncodeToString(bytes) + } + } + + err = w.Secrets.PutSecret(ctx, putSecretReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Potential future follow up to auto complete secret scopes for the first argument. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + return cmd +} + +func promptSecret(cmd *cobra.Command) ([]byte, error) { + // If stdin is a TTY, prompt for the secret. + if !cmdio.IsInTTY(cmd.Context()) { + return io.ReadAll(os.Stdin) + } + + value, err := cmdio.Secret(cmd.Context(), "Please enter your secret value") + if err != nil { + return nil, err + } + + return []byte(value), nil +} diff --git a/cmd/workspace/secrets/secrets.go b/cmd/workspace/secrets/secrets.go index fada4d1fe..5425da90c 100755 --- a/cmd/workspace/secrets/secrets.go +++ b/cmd/workspace/secrets/secrets.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "secrets", - Short: `The Secrets API allows you to manage secrets, secret scopes, and access permissions.`, - Long: `The Secrets API allows you to manage secrets, secret scopes, and access +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "secrets", + Short: `The Secrets API allows you to manage secrets, secret scopes, and access permissions.`, + Long: `The Secrets API allows you to manage secrets, secret scopes, and access permissions. Sometimes accessing data requires that you authenticate to external data @@ -27,45 +32,62 @@ var Cmd = &cobra.Command{ Databricks secrets. 
While Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not possible to prevent such users from reading secrets.`, - Annotations: map[string]string{ - "package": "workspace", - }, + GroupID: "workspace", + Annotations: map[string]string{ + "package": "workspace", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create-scope command -var createScopeReq workspace.CreateScope -var createScopeJson flags.JsonFlag -func init() { - Cmd.AddCommand(createScopeCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createScopeOverrides []func( + *cobra.Command, + *workspace.CreateScope, +) + +func newCreateScope() *cobra.Command { + cmd := &cobra.Command{} + + var createScopeReq workspace.CreateScope + var createScopeJson flags.JsonFlag + // TODO: short flags - createScopeCmd.Flags().Var(&createScopeJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createScopeJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: backend_azure_keyvault - createScopeCmd.Flags().StringVar(&createScopeReq.InitialManagePrincipal, "initial-manage-principal", createScopeReq.InitialManagePrincipal, `The principal that is initially granted MANAGE permission to the created scope.`) - createScopeCmd.Flags().Var(&createScopeReq.ScopeBackendType, "scope-backend-type", `The backend type the scope will be created with.`) + cmd.Flags().StringVar(&createScopeReq.InitialManagePrincipal, "initial-manage-principal", createScopeReq.InitialManagePrincipal, `The principal that is initially granted MANAGE permission to the created scope.`) + cmd.Flags().Var(&createScopeReq.ScopeBackendType, "scope-backend-type", `The backend type the scope will be created with.`) -} - -var createScopeCmd = &cobra.Command{ - Use: "create-scope SCOPE", - Short: `Create a new secret scope.`, - Long: `Create a new secret scope. + cmd.Use = "create-scope SCOPE" + cmd.Short = `Create a new secret scope.` + cmd.Long = `Create a new secret scope. The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. The maximum number of scopes - in a workspace is 100.`, + in a workspace is 100.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -83,45 +105,67 @@ var createScopeCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createScopeOverrides { + fn(cmd, &createScopeReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateScope()) + }) } // start delete-acl command -var deleteAclReq workspace.DeleteAcl -var deleteAclJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteAclCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteAclOverrides []func( + *cobra.Command, + *workspace.DeleteAcl, +) + +func newDeleteAcl() *cobra.Command { + cmd := &cobra.Command{} + + var deleteAclReq workspace.DeleteAcl + var deleteAclJson flags.JsonFlag + // TODO: short flags - deleteAclCmd.Flags().Var(&deleteAclJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteAclJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteAclCmd = &cobra.Command{ - Use: "delete-acl SCOPE PRINCIPAL", - Short: `Delete an ACL.`, - Long: `Delete an ACL. + cmd.Use = "delete-acl SCOPE PRINCIPAL" + cmd.Short = `Delete an ACL.` + cmd.Long = `Delete an ACL. Deletes the given ACL on the given scope. Users must have the MANAGE permission to invoke this API. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope, principal, or ACL exists. Throws PERMISSION_DENIED if the user does not have permission to make this - API call.`, + API call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -140,44 +184,66 @@ var deleteAclCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteAclOverrides { + fn(cmd, &deleteAclReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteAcl()) + }) } // start delete-scope command -var deleteScopeReq workspace.DeleteScope -var deleteScopeJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteScopeCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteScopeOverrides []func( + *cobra.Command, + *workspace.DeleteScope, +) + +func newDeleteScope() *cobra.Command { + cmd := &cobra.Command{} + + var deleteScopeReq workspace.DeleteScope + var deleteScopeJson flags.JsonFlag + // TODO: short flags - deleteScopeCmd.Flags().Var(&deleteScopeJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteScopeJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteScopeCmd = &cobra.Command{ - Use: "delete-scope SCOPE", - Short: `Delete a secret scope.`, - Long: `Delete a secret scope. + cmd.Use = "delete-scope SCOPE" + cmd.Short = `Delete a secret scope.` + cmd.Long = `Delete a secret scope. Deletes a secret scope. Throws RESOURCE_DOES_NOT_EXIST if the scope does not exist. Throws PERMISSION_DENIED if the user does not have permission to make this API - call.`, + call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -195,45 +261,67 @@ var deleteScopeCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteScopeOverrides { + fn(cmd, &deleteScopeReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteScope()) + }) } // start delete-secret command -var deleteSecretReq workspace.DeleteSecret -var deleteSecretJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteSecretCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteSecretOverrides []func( + *cobra.Command, + *workspace.DeleteSecret, +) + +func newDeleteSecret() *cobra.Command { + cmd := &cobra.Command{} + + var deleteSecretReq workspace.DeleteSecret + var deleteSecretJson flags.JsonFlag + // TODO: short flags - deleteSecretCmd.Flags().Var(&deleteSecretJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteSecretJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteSecretCmd = &cobra.Command{ - Use: "delete-secret SCOPE KEY", - Short: `Delete a secret.`, - Long: `Delete a secret. + cmd.Use = "delete-secret SCOPE KEY" + cmd.Short = `Delete a secret.` + cmd.Long = `Delete a secret. Deletes the secret stored in this secret scope. You must have WRITE or MANAGE permission on the secret scope. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope or secret exists. 
Throws PERMISSION_DENIED if the user does not have permission to make this - API call.`, + API call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -252,40 +340,62 @@ var deleteSecretCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteSecretOverrides { + fn(cmd, &deleteSecretReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteSecret()) + }) } // start get-acl command -var getAclReq workspace.GetAclRequest -func init() { - Cmd.AddCommand(getAclCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getAclOverrides []func( + *cobra.Command, + *workspace.GetAclRequest, +) + +func newGetAcl() *cobra.Command { + cmd := &cobra.Command{} + + var getAclReq workspace.GetAclRequest + // TODO: short flags -} - -var getAclCmd = &cobra.Command{ - Use: "get-acl SCOPE PRINCIPAL", - Short: `Get secret ACL details.`, - Long: `Get secret ACL details. + cmd.Use = "get-acl SCOPE PRINCIPAL" + cmd.Short = `Get secret ACL details.` + cmd.Long = `Get secret ACL details. Gets the details about the given ACL, such as the group and permission. Users must have the MANAGE permission to invoke this API. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws PERMISSION_DENIED if the user does not have permission to make this API - call.`, + call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -297,40 +407,135 @@ var getAclCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getAclOverrides { + fn(cmd, &getAclReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetAcl()) + }) +} + +// start get-secret command + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getSecretOverrides []func( + *cobra.Command, + *workspace.GetSecretRequest, +) + +func newGetSecret() *cobra.Command { + cmd := &cobra.Command{} + + var getSecretReq workspace.GetSecretRequest + + // TODO: short flags + + cmd.Use = "get-secret SCOPE KEY" + cmd.Short = `Get a secret.` + cmd.Long = `Get a secret. + + Gets the bytes representation of a secret value for the specified scope and + key. + + Users need the READ permission to make this call. + + Note that the secret value returned is in bytes. The interpretation of the + bytes is determined by the caller in DBUtils and the type the data is decoded + into. + + Throws PERMISSION_DENIED if the user does not have permission to make this + API call. Throws RESOURCE_DOES_NOT_EXIST if no such secret or secret scope + exists.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getSecretReq.Scope = args[0] + getSecretReq.Key = args[1] + + response, err := w.Secrets.GetSecret(ctx, getSecretReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getSecretOverrides { + fn(cmd, &getSecretReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetSecret()) + }) } // start list-acls command -var listAclsReq workspace.ListAclsRequest -func init() { - Cmd.AddCommand(listAclsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listAclsOverrides []func( + *cobra.Command, + *workspace.ListAclsRequest, +) + +func newListAcls() *cobra.Command { + cmd := &cobra.Command{} + + var listAclsReq workspace.ListAclsRequest + // TODO: short flags -} - -var listAclsCmd = &cobra.Command{ - Use: "list-acls SCOPE", - Short: `Lists ACLs.`, - Long: `Lists ACLs. + cmd.Use = "list-acls SCOPE" + cmd.Short = `Lists ACLs.` + cmd.Long = `Lists ACLs. List the ACLs for a given secret scope. Users must have the MANAGE permission to invoke this API. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws PERMISSION_DENIED if the user does not have permission to make this API - call.`, + call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -341,32 +546,50 @@ var listAclsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listAclsOverrides { + fn(cmd, &listAclsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListAcls()) + }) } // start list-scopes command -func init() { - Cmd.AddCommand(listScopesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listScopesOverrides []func( + *cobra.Command, +) -} +func newListScopes() *cobra.Command { + cmd := &cobra.Command{} -var listScopesCmd = &cobra.Command{ - Use: "list-scopes", - Short: `List all scopes.`, - Long: `List all scopes. + cmd.Use = "list-scopes" + cmd.Short = `List all scopes.` + cmd.Long = `List all scopes. Lists all secret scopes available in the workspace. Throws PERMISSION_DENIED if the user does not have permission to make this - API call.`, + API call.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Secrets.ListScopesAll(ctx) @@ -374,25 +597,45 @@ var listScopesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listScopesOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListScopes()) + }) } // start list-secrets command -var listSecretsReq workspace.ListSecretsRequest -func init() { - Cmd.AddCommand(listSecretsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listSecretsOverrides []func( + *cobra.Command, + *workspace.ListSecretsRequest, +) + +func newListSecrets() *cobra.Command { + cmd := &cobra.Command{} + + var listSecretsReq workspace.ListSecretsRequest + // TODO: short flags -} - -var listSecretsCmd = &cobra.Command{ - Use: "list-secrets SCOPE", - Short: `List secret keys.`, - Long: `List secret keys. + cmd.Use = "list-secrets SCOPE" + cmd.Short = `List secret keys.` + cmd.Long = `List secret keys. Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ @@ -401,15 +644,17 @@ var listSecretsCmd = &cobra.Command{ The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. 
Throws PERMISSION_DENIED if the user does not have permission to make this API - call.`, + call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -420,27 +665,47 @@ var listSecretsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listSecretsOverrides { + fn(cmd, &listSecretsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListSecrets()) + }) } // start put-acl command -var putAclReq workspace.PutAcl -var putAclJson flags.JsonFlag -func init() { - Cmd.AddCommand(putAclCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var putAclOverrides []func( + *cobra.Command, + *workspace.PutAcl, +) + +func newPutAcl() *cobra.Command { + cmd := &cobra.Command{} + + var putAclReq workspace.PutAcl + var putAclJson flags.JsonFlag + // TODO: short flags - putAclCmd.Flags().Var(&putAclJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&putAclJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var putAclCmd = &cobra.Command{ - Use: "put-acl SCOPE PRINCIPAL PERMISSION", - Short: `Create/update an ACL.`, - Long: `Create/update an ACL. + cmd.Use = "put-acl SCOPE PRINCIPAL PERMISSION" + cmd.Short = `Create/update an ACL.` + cmd.Long = `Create/update an ACL. Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point. @@ -467,18 +732,20 @@ var putAclCmd = &cobra.Command{ RESOURCE_ALREADY_EXISTS if a permission for the principal already exists. Throws INVALID_PARAMETER_VALUE if the permission or principal is invalid. Throws PERMISSION_DENIED if the user does not have permission to make this - API call.`, + API call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -501,10 +768,24 @@ var putAclCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range putAclOverrides { + fn(cmd, &putAclReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPutAcl()) + }) } // end service Secrets diff --git a/cmd/workspace/service-principals/overrides.go b/cmd/workspace/service-principals/overrides.go index c335bead6..185549b7c 100644 --- a/cmd/workspace/service-principals/overrides.go +++ b/cmd/workspace/service-principals/overrides.go @@ -1,9 +1,17 @@ package service_principals -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListServicePrincipalsRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.ApplicationId}} {{.DisplayName}} {{range .Groups}}{{.Display}} {{end}} {{if .Active}}{{"ACTIVE"|green}}{{else}}DISABLED{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/service-principals/service-principals.go b/cmd/workspace/service-principals/service-principals.go index 7dfc390f2..1958dd21b 100755 --- a/cmd/workspace/service-principals/service-principals.go +++ b/cmd/workspace/service-principals/service-principals.go @@ -12,57 +12,79 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "service-principals", - Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, - Long: `Identities for use with jobs, automated tools, and systems such as scripts, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principals", + Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, + Long: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. Databricks recommends creating service principals to run production jobs or modify production data. If all processes that act on production data run with service principals, interactive users do not need any write, delete, or modify privileges in production. This eliminates the risk of a user overwriting production data by accident.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.ServicePrincipal -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.ServicePrincipal, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.ServicePrincipal + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) - createCmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) // TODO: array: roles -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a service principal.`, - Long: `Create a service principal. + cmd.Use = "create" + cmd.Short = `Create a service principal.` + cmd.Long = `Create a service principal. - Creates a new service principal in the Databricks workspace.`, + Creates a new service principal in the Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -79,31 +101,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteServicePrincipalRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *iam.DeleteServicePrincipalRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteServicePrincipalRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a service principal.`, - Long: `Delete a service principal. + cmd.Use = "delete ID" + cmd.Short = `Delete a service principal.` + cmd.Long = `Delete a service principal. - Delete a single service principal in the Databricks workspace.`, + Delete a single service principal in the Databricks workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -131,32 +174,53 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetServicePrincipalRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetServicePrincipalRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetServicePrincipalRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get service principal details.`, - Long: `Get service principal details. + cmd.Use = "get ID" + cmd.Short = `Get service principal details.` + cmd.Long = `Get service principal details. Gets the details for a single service principal define in the Databricks - workspace.`, + workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -184,48 +248,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListServicePrincipalsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
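// Hypothetical illustration: because each per-command override receives a pointer
// to the request struct as well as the command, a curated overrides.go can seed
// request defaults before RunE executes. The SCIM filter below is an assumption,
// chosen only to show the mechanism.
package service_principals

import (
	"github.com/databricks/databricks-sdk-go/service/iam"
	"github.com/spf13/cobra"
)

func listActiveOnlyOverride(listCmd *cobra.Command, listReq *iam.ListServicePrincipalsRequest) {
	// Runs when newList() constructs the command, so this behaves as a default
	// that an explicit --filter on the command line still replaces.
	listReq.Filter = `active eq true`
}

func init() {
	listOverrides = append(listOverrides, listActiveOnlyOverride)
}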
+var listOverrides []func( + *cobra.Command, + *iam.ListServicePrincipalsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListServicePrincipalsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List service principals.`, - Long: `List service principals. + cmd.Use = "list" + cmd.Short = `List service principals.` + cmd.Long = `List service principals. - Gets the set of service principals associated with a Databricks workspace.`, + Gets the set of service principals associated with a Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -242,36 +328,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schemas -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update service principal details.`, - Long: `Update service principal details. + cmd.Use = "patch ID" + cmd.Short = `Update service principal details.` + cmd.Long = `Update service principal details. Partially updates the details of a single service principal in the Databricks - workspace.`, + workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -305,44 +413,65 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.ServicePrincipal -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.ServicePrincipal, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.ServicePrincipal + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) - updateCmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks service principal ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks service principal ID.`) // TODO: array: roles -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace service principal.`, - Long: `Replace service principal. + cmd.Use = "update ID" + cmd.Short = `Replace service principal.` + cmd.Long = `Replace service principal. Updates the details of a single service principal. - This action replaces the existing service principal with the same name.`, + This action replaces the existing service principal with the same name.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -377,10 +506,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service ServicePrincipals diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go index 46c830ebc..e22a38443 100755 --- a/cmd/workspace/serving-endpoints/serving-endpoints.go +++ b/cmd/workspace/serving-endpoints/serving-endpoints.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "serving-endpoints", - Short: `The Serving Endpoints API allows you to create, update, and delete model serving endpoints.`, - Long: `The Serving Endpoints API allows you to create, update, and delete model +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "serving-endpoints", + Short: `The Serving Endpoints API allows you to create, update, and delete model serving endpoints.`, + Long: `The Serving Endpoints API allows you to create, update, and delete model serving endpoints. You can use a serving endpoint to serve models from the Databricks Model @@ -29,35 +34,52 @@ var Cmd = &cobra.Command{ settings to define how requests should be routed to your served models behind an endpoint. Additionally, you can configure the scale of resources that should be applied to each served model.`, - Annotations: map[string]string{ - "package": "serving", - }, + GroupID: "serving", + Annotations: map[string]string{ + "package": "serving", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start build-logs command -var buildLogsReq serving.BuildLogsRequest -func init() { - Cmd.AddCommand(buildLogsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var buildLogsOverrides []func( + *cobra.Command, + *serving.BuildLogsRequest, +) + +func newBuildLogs() *cobra.Command { + cmd := &cobra.Command{} + + var buildLogsReq serving.BuildLogsRequest + // TODO: short flags -} - -var buildLogsCmd = &cobra.Command{ - Use: "build-logs NAME SERVED_MODEL_NAME", - Short: `Retrieve the logs associated with building the model's environment for a given serving endpoint's served model.`, - Long: `Retrieve the logs associated with building the model's environment for a given + cmd.Use = "build-logs NAME SERVED_MODEL_NAME" + cmd.Short = `Retrieve the logs associated with building the model's environment for a given serving endpoint's served model.` + cmd.Long = `Retrieve the logs associated with building the model's environment for a given serving endpoint's served model. 
- Retrieves the build logs associated with the provided served model.`, + Retrieves the build logs associated with the provided served model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -69,37 +91,57 @@ var buildLogsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range buildLogsOverrides { + fn(cmd, &buildLogsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newBuildLogs()) + }) } // start create command -var createReq serving.CreateServingEndpoint -var createJson flags.JsonFlag -var createSkipWait bool -var createTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *serving.CreateServingEndpoint, +) -func init() { - Cmd.AddCommand(createCmd) +func newCreate() *cobra.Command { + cmd := &cobra.Command{} - createCmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach NOT_UPDATING state`) - createCmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach NOT_UPDATING state`) + var createReq serving.CreateServingEndpoint + var createJson flags.JsonFlag + + var createSkipWait bool + var createTimeout time.Duration + + cmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach NOT_UPDATING state`) + cmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach NOT_UPDATING state`) // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} + cmd.Use = "create" + cmd.Short = `Create a new serving endpoint.` + cmd.Long = `Create a new serving endpoint.` -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new serving endpoint.`, - Long: `Create a new serving endpoint.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -130,33 +172,55 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq serving.DeleteServingEndpointRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *serving.DeleteServingEndpointRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq serving.DeleteServingEndpointRequest + // TODO: short flags -} + cmd.Use = "delete NAME" + cmd.Short = `Delete a serving endpoint.` + cmd.Long = `Delete a serving endpoint.` -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a serving endpoint.`, - Long: `Delete a serving endpoint.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -167,36 +231,58 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start export-metrics command -var exportMetricsReq serving.ExportMetricsRequest -func init() { - Cmd.AddCommand(exportMetricsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var exportMetricsOverrides []func( + *cobra.Command, + *serving.ExportMetricsRequest, +) + +func newExportMetrics() *cobra.Command { + cmd := &cobra.Command{} + + var exportMetricsReq serving.ExportMetricsRequest + // TODO: short flags -} - -var exportMetricsCmd = &cobra.Command{ - Use: "export-metrics NAME", - Short: `Retrieve the metrics associated with a serving endpoint.`, - Long: `Retrieve the metrics associated with a serving endpoint. + cmd.Use = "export-metrics NAME" + cmd.Short = `Retrieve the metrics associated with a serving endpoint.` + cmd.Long = `Retrieve the metrics associated with a serving endpoint. 
Retrieves the metrics associated with the provided serving endpoint in either - Prometheus or OpenMetrics exposition format.`, + Prometheus or OpenMetrics exposition format.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -207,35 +293,57 @@ var exportMetricsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range exportMetricsOverrides { + fn(cmd, &exportMetricsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newExportMetrics()) + }) } // start get command -var getReq serving.GetServingEndpointRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *serving.GetServingEndpointRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq serving.GetServingEndpointRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a single serving endpoint.`, - Long: `Get a single serving endpoint. + cmd.Use = "get NAME" + cmd.Short = `Get a single serving endpoint.` + cmd.Long = `Get a single serving endpoint. - Retrieves the details for a single serving endpoint.`, + Retrieves the details for a single serving endpoint.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -246,27 +354,168 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionLevelsOverrides []func( + *cobra.Command, + *serving.GetServingEndpointPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq serving.GetServingEndpointPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels SERVING_ENDPOINT_ID" + cmd.Short = `Get serving endpoint permission levels.` + cmd.Long = `Get serving endpoint permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionLevelsReq.ServingEndpointId = args[0] + + response, err := w.ServingEndpoints.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *serving.GetServingEndpointPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq serving.GetServingEndpointPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions SERVING_ENDPOINT_ID" + cmd.Short = `Get serving endpoint permissions.` + cmd.Long = `Get serving endpoint permissions. + + Gets the permissions of a serving endpoint. Serving endpoints can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionsReq.ServingEndpointId = args[0] + + response, err := w.ServingEndpoints.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
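// Hypothetical illustration: the same hook pattern lets a curated overrides.go for
// this package register a compact output template for `list`, mirroring what
// cmd/workspace/service-principals/overrides.go does elsewhere in this diff. The
// template field (.Name) is an assumption about the response shape.
package serving_endpoints

import (
	"github.com/databricks/cli/libs/cmdio"
	"github.com/spf13/cobra"
)

func listTemplateOverride(listCmd *cobra.Command) {
	listCmd.Annotations["template"] = cmdio.Heredoc(`
	{{range .}}{{.Name|green}}
	{{end}}`)
}

func init() {
	listOverrides = append(listOverrides, listTemplateOverride)
}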
+var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Retrieve all serving endpoints.`, - Long: `Retrieve all serving endpoints.`, + cmd.Use = "list" + cmd.Short = `Retrieve all serving endpoints.` + cmd.Long = `Retrieve all serving endpoints.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.ServingEndpoints.ListAll(ctx) @@ -274,36 +523,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start logs command -var logsReq serving.LogsRequest -func init() { - Cmd.AddCommand(logsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var logsOverrides []func( + *cobra.Command, + *serving.LogsRequest, +) + +func newLogs() *cobra.Command { + cmd := &cobra.Command{} + + var logsReq serving.LogsRequest + // TODO: short flags -} - -var logsCmd = &cobra.Command{ - Use: "logs NAME SERVED_MODEL_NAME", - Short: `Retrieve the most recent log lines associated with a given serving endpoint's served model.`, - Long: `Retrieve the most recent log lines associated with a given serving endpoint's + cmd.Use = "logs NAME SERVED_MODEL_NAME" + cmd.Short = `Retrieve the most recent log lines associated with a given serving endpoint's served model.` + cmd.Long = `Retrieve the most recent log lines associated with a given serving endpoint's served model. - Retrieves the service logs associated with the provided served model.`, + Retrieves the service logs associated with the provided served model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -315,33 +586,55 @@ var logsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range logsOverrides { + fn(cmd, &logsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogs()) + }) } // start query command -var queryReq serving.QueryRequest -func init() { - Cmd.AddCommand(queryCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var queryOverrides []func( + *cobra.Command, + *serving.QueryRequest, +) + +func newQuery() *cobra.Command { + cmd := &cobra.Command{} + + var queryReq serving.QueryRequest + // TODO: short flags -} + cmd.Use = "query NAME" + cmd.Short = `Query a serving endpoint with provided model input.` + cmd.Long = `Query a serving endpoint with provided model input.` -var queryCmd = &cobra.Command{ - Use: "query NAME", - Short: `Query a serving endpoint with provided model input.`, - Long: `Query a serving endpoint with provided model input.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -352,44 +645,136 @@ var queryCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range queryOverrides { + fn(cmd, &queryReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newQuery()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *serving.ServingEndpointPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq serving.ServingEndpointPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions SERVING_ENDPOINT_ID" + cmd.Short = `Set serving endpoint permissions.` + cmd.Long = `Set serving endpoint permissions. + + Sets permissions on a serving endpoint. 
Serving endpoints can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + setPermissionsReq.ServingEndpointId = args[0] + + response, err := w.ServingEndpoints.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start update-config command -var updateConfigReq serving.EndpointCoreConfigInput -var updateConfigJson flags.JsonFlag -var updateConfigSkipWait bool -var updateConfigTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateConfigOverrides []func( + *cobra.Command, + *serving.EndpointCoreConfigInput, +) -func init() { - Cmd.AddCommand(updateConfigCmd) +func newUpdateConfig() *cobra.Command { + cmd := &cobra.Command{} - updateConfigCmd.Flags().BoolVar(&updateConfigSkipWait, "no-wait", updateConfigSkipWait, `do not wait to reach NOT_UPDATING state`) - updateConfigCmd.Flags().DurationVar(&updateConfigTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach NOT_UPDATING state`) + var updateConfigReq serving.EndpointCoreConfigInput + var updateConfigJson flags.JsonFlag + + var updateConfigSkipWait bool + var updateConfigTimeout time.Duration + + cmd.Flags().BoolVar(&updateConfigSkipWait, "no-wait", updateConfigSkipWait, `do not wait to reach NOT_UPDATING state`) + cmd.Flags().DurationVar(&updateConfigTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach NOT_UPDATING state`) // TODO: short flags - updateConfigCmd.Flags().Var(&updateConfigJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateConfigJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: traffic_config -} - -var updateConfigCmd = &cobra.Command{ - Use: "update-config", - Short: `Update a serving endpoint with a new config.`, - Long: `Update a serving endpoint with a new config. + cmd.Use = "update-config" + cmd.Short = `Update a serving endpoint with a new config.` + cmd.Long = `Update a serving endpoint with a new config. Updates any combination of the serving endpoint's served models, the compute configuration of those served models, and the endpoint's traffic config. 
An endpoint that already has an update in progress can not be updated until the - current update completes or fails.`, + current update completes or fails.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -420,10 +805,96 @@ var updateConfigCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateConfigOverrides { + fn(cmd, &updateConfigReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateConfig()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *serving.ServingEndpointPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq serving.ServingEndpointPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions SERVING_ENDPOINT_ID" + cmd.Short = `Update serving endpoint permissions.` + cmd.Long = `Update serving endpoint permissions. + + Updates the permissions on a serving endpoint. Serving endpoints can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + updatePermissionsReq.ServingEndpointId = args[0] + + response, err := w.ServingEndpoints.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service ServingEndpoints diff --git a/cmd/workspace/shares/shares.go b/cmd/workspace/shares/shares.go index 2580b060e..cf96b8b3a 100755 --- a/cmd/workspace/shares/shares.go +++ b/cmd/workspace/shares/shares.go @@ -10,47 +10,73 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "shares", - Short: `Databricks Shares REST API.`, - Long: `Databricks Shares REST API`, - Annotations: map[string]string{ - "package": "sharing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "shares", + Short: `A share is a container instantiated with :method:shares/create.`, + Long: `A share is a container instantiated with :method:shares/create. Once created + you can iteratively register a collection of existing data assets defined + within the metastore using :method:shares/update. You can register data assets + under their original name, qualified by their original schema, or provide + alternate exposed names.`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sharing.CreateShare -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sharing.CreateShare, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sharing.CreateShare + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME", - Short: `Create a share.`, - Long: `Create a share. + cmd.Use = "create NAME" + cmd.Short = `Create a share.` + cmd.Long = `Create a share. Creates a new share for data objects. Data objects can be added after creation with **update**. 
The caller must be a metastore admin or have the - **CREATE_SHARE** privilege on the metastore.`, + **CREATE_SHARE** privilege on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -68,36 +94,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sharing.DeleteShareRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sharing.DeleteShareRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sharing.DeleteShareRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a share.`, - Long: `Delete a share. + cmd.Use = "delete NAME" + cmd.Short = `Delete a share.` + cmd.Long = `Delete a share. Deletes a data object share from the metastore. The caller must be an owner of - the share.`, + the share.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -108,38 +156,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sharing.GetShareRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
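// Hypothetical illustration for this package: a curated overrides.go could use the
// getOverrides hook to make `shares get` include shared data objects by default.
// The function and file names are assumptions for illustration only.
package shares

import (
	"github.com/databricks/databricks-sdk-go/service/sharing"
	"github.com/spf13/cobra"
)

func getIncludeSharedDataOverride(getCmd *cobra.Command, getReq *sharing.GetShareRequest) {
	// Applied at construction time; an explicit --include-shared-data=false on the
	// command line still takes precedence.
	getReq.IncludeSharedData = true
}

func init() {
	getOverrides = append(getOverrides, getIncludeSharedDataOverride)
}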
+var getOverrides []func( + *cobra.Command, + *sharing.GetShareRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sharing.GetShareRequest + // TODO: short flags - getCmd.Flags().BoolVar(&getReq.IncludeSharedData, "include-shared-data", getReq.IncludeSharedData, `Query for data to include in the share.`) + cmd.Flags().BoolVar(&getReq.IncludeSharedData, "include-shared-data", getReq.IncludeSharedData, `Query for data to include in the share.`) -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a share.`, - Long: `Get a share. + cmd.Use = "get NAME" + cmd.Short = `Get a share.` + cmd.Long = `Get a share. Gets a data object share from the metastore. The caller must be a metastore - admin or the owner of the share.`, + admin or the owner of the share.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -150,31 +220,49 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List shares.`, - Long: `List shares. + cmd.Use = "list" + cmd.Short = `List shares.` + cmd.Long = `List shares. Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific - ordering of the elements in the array.`, + ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Shares.ListAll(ctx) @@ -182,36 +270,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start share-permissions command -var sharePermissionsReq sharing.SharePermissionsRequest -func init() { - Cmd.AddCommand(sharePermissionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var sharePermissionsOverrides []func( + *cobra.Command, + *sharing.SharePermissionsRequest, +) + +func newSharePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var sharePermissionsReq sharing.SharePermissionsRequest + // TODO: short flags -} - -var sharePermissionsCmd = &cobra.Command{ - Use: "share-permissions NAME", - Short: `Get permissions.`, - Long: `Get permissions. + cmd.Use = "share-permissions NAME" + cmd.Short = `Get permissions.` + cmd.Long = `Get permissions. Gets the permissions for a data share from the metastore. The caller must be a - metastore admin or the owner of the share.`, + metastore admin or the owner of the share.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -222,32 +332,52 @@ var sharePermissionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range sharePermissionsOverrides { + fn(cmd, &sharePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSharePermissions()) + }) } // start update command -var updateReq sharing.UpdateShare -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *sharing.UpdateShare, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sharing.UpdateShare + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the share.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of share.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the share.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of share.`) // TODO: array: updates -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a share.`, - Long: `Update a share. + cmd.Use = "update NAME" + cmd.Short = `Update a share.` + cmd.Long = `Update a share. Updates the share with the changes and data objects in the request. The caller must be the owner of the share or a metastore admin. @@ -262,18 +392,20 @@ var updateCmd = &cobra.Command{ indefinitely for recipients to be able to access the table. Typically, you should use a group as the share owner. - Table removals through **update** do not require additional privileges.`, + Table removals through **update** do not require additional privileges.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -291,43 +423,65 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // start update-permissions command -var updatePermissionsReq sharing.UpdateSharePermissions -var updatePermissionsJson flags.JsonFlag -func init() { - Cmd.AddCommand(updatePermissionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updatePermissionsOverrides []func( + *cobra.Command, + *sharing.UpdateSharePermissions, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq sharing.UpdateSharePermissions + var updatePermissionsJson flags.JsonFlag + // TODO: short flags - updatePermissionsCmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: changes -} - -var updatePermissionsCmd = &cobra.Command{ - Use: "update-permissions NAME", - Short: `Update permissions.`, - Long: `Update permissions. + cmd.Use = "update-permissions NAME" + cmd.Short = `Update permissions.` + cmd.Long = `Update permissions. Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an owner of the share. For new recipient grants, the user must also be the owner of the recipients. - recipient revocations do not require additional privileges.`, + recipient revocations do not require additional privileges.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -344,10 +498,24 @@ var updatePermissionsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service Shares diff --git a/cmd/workspace/storage-credentials/overrides.go b/cmd/workspace/storage-credentials/overrides.go index 8bce9ffa2..37c18ca6c 100644 --- a/cmd/workspace/storage-credentials/overrides.go +++ b/cmd/workspace/storage-credentials/overrides.go @@ -1,10 +1,17 @@ package storage_credentials -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{header "Credentials"}} {{range .}}{{.Id|green}} {{.Name|cyan}} {{if .AwsIamRole}}{{.AwsIamRole.RoleArn}}{{end}}{{if .AzureServicePrincipal}}{{.AzureServicePrincipal.ApplicationId}}{{end}}{{if .GcpServiceAccountKey}}{{.Email}}{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/storage-credentials/storage-credentials.go b/cmd/workspace/storage-credentials/storage-credentials.go index bbd7dd581..b5dd5141b 100755 --- a/cmd/workspace/storage-credentials/storage-credentials.go +++ b/cmd/workspace/storage-credentials/storage-credentials.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "storage-credentials", - Short: `A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.`, - Long: `A storage credential represents an authentication and authorization mechanism +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "storage-credentials", + Short: `A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.`, + Long: `A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. If a user does not have access to a storage @@ -28,34 +33,49 @@ var Cmd = &cobra.Command{ To create storage credentials, you must be a Databricks account admin. The account admin who creates the storage credential can delegate ownership to another user or group to manage permissions on it.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateStorageCredential -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.CreateStorageCredential, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateStorageCredential + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_iam_role // TODO: complex arg: azure_managed_identity // TODO: complex arg: azure_service_principal - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Comment associated with the credential.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Comment associated with the credential.`) // TODO: output-only field - createCmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) - createCmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Supplying true to this argument skips validation of the created credential.`) + cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) + cmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Supplying true to this argument skips validation of the created credential.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME", - Short: `Create a storage credential.`, - Long: `Create a storage credential. + cmd.Use = "create NAME" + cmd.Short = `Create a storage credential.` + cmd.Long = `Create a storage credential. Creates a new storage credential. The request object is specific to the cloud: @@ -64,18 +84,20 @@ var createCmd = &cobra.Command{ **DatabricksGcpServiceAccount** for GCP managed credentials. The caller must be a metastore admin and have the - **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.`, + **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -93,34 +115,55 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteStorageCredentialRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteStorageCredentialRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteStorageCredentialRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if there are dependent external locations or external tables.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if there are dependent external locations or external tables.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a credential.`, - Long: `Delete a credential. + cmd.Use = "delete NAME" + cmd.Short = `Delete a credential.` + cmd.Long = `Delete a credential. Deletes a storage credential from the metastore. The caller must be an owner - of the storage credential.`, + of the storage credential.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -148,33 +191,54 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetStorageCredentialRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetStorageCredentialRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetStorageCredentialRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a credential.`, - Long: `Get a credential. + cmd.Use = "get NAME" + cmd.Short = `Get a credential.` + cmd.Long = `Get a credential. Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the - storage credential.`, + storage credential.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -202,33 +266,51 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List credentials.`, - Long: `List credentials. + cmd.Use = "list" + cmd.Short = `List credentials.` + cmd.Long = `List credentials. Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore admin, all storage credentials will be retrieved. There is no guarantee of a specific ordering of - the elements in the array.`, + the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.StorageCredentials.ListAll(ctx) @@ -236,46 +318,67 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateStorageCredential -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *catalog.UpdateStorageCredential, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateStorageCredential + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_iam_role // TODO: complex arg: azure_managed_identity // TODO: complex arg: azure_service_principal - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Comment associated with the credential.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Comment associated with the credential.`) // TODO: output-only field - updateCmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if there are dependent external locations or external tables.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The credential name.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of credential.`) - updateCmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) - updateCmd.Flags().BoolVar(&updateReq.SkipValidation, "skip-validation", updateReq.SkipValidation, `Supplying true to this argument skips validation of the updated credential.`) + cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if there are dependent external locations or external tables.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The credential name.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of credential.`) + cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) + cmd.Flags().BoolVar(&updateReq.SkipValidation, "skip-validation", updateReq.SkipValidation, `Supplying true to this argument skips validation of the updated credential.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a credential.`, - Long: `Update a credential. + cmd.Use = "update NAME" + cmd.Short = `Update a credential.` + cmd.Long = `Update a credential. Updates a storage credential on the metastore. The caller must be the owner of the storage credential or a metastore admin. If the caller is a metastore - admin, only the __owner__ credential can be changed.`, + admin, only the __owner__ credential can be changed.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -310,36 +413,56 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // start validate command -var validateReq catalog.ValidateStorageCredential -var validateJson flags.JsonFlag -func init() { - Cmd.AddCommand(validateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var validateOverrides []func( + *cobra.Command, + *catalog.ValidateStorageCredential, +) + +func newValidate() *cobra.Command { + cmd := &cobra.Command{} + + var validateReq catalog.ValidateStorageCredential + var validateJson flags.JsonFlag + // TODO: short flags - validateCmd.Flags().Var(&validateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&validateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_iam_role // TODO: complex arg: azure_managed_identity // TODO: complex arg: azure_service_principal // TODO: output-only field - validateCmd.Flags().StringVar(&validateReq.ExternalLocationName, "external-location-name", validateReq.ExternalLocationName, `The name of an existing external location to validate.`) - validateCmd.Flags().BoolVar(&validateReq.ReadOnly, "read-only", validateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) + cmd.Flags().StringVar(&validateReq.ExternalLocationName, "external-location-name", validateReq.ExternalLocationName, `The name of an existing external location to validate.`) + cmd.Flags().BoolVar(&validateReq.ReadOnly, "read-only", validateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) // TODO: any: storage_credential_name - validateCmd.Flags().StringVar(&validateReq.Url, "url", validateReq.Url, `The external location url to validate.`) + cmd.Flags().StringVar(&validateReq.Url, "url", validateReq.Url, `The external location url to validate.`) -} - -var validateCmd = &cobra.Command{ - Use: "validate", - Short: `Validate a storage credential.`, - Long: `Validate a storage credential. + cmd.Use = "validate" + cmd.Short = `Validate a storage credential.` + cmd.Long = `Validate a storage credential. Validates a storage credential. At least one of __external_location_name__ and __url__ need to be provided. If only one of them is provided, it will be used @@ -352,18 +475,20 @@ var validateCmd = &cobra.Command{ The caller must be a metastore admin or the storage credential owner or have the **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage - credential.`, + credential.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -380,10 +505,24 @@ var validateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range validateOverrides { + fn(cmd, &validateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newValidate()) + }) } // end service StorageCredentials diff --git a/cmd/workspace/system-schemas/system-schemas.go b/cmd/workspace/system-schemas/system-schemas.go index fed5e5e52..2dd729f1b 100755 --- a/cmd/workspace/system-schemas/system-schemas.go +++ b/cmd/workspace/system-schemas/system-schemas.go @@ -11,44 +11,66 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "system-schemas", - Short: `A system schema is a schema that lives within the system catalog.`, - Long: `A system schema is a schema that lives within the system catalog. A system +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "system-schemas", + Short: `A system schema is a schema that lives within the system catalog.`, + Long: `A system schema is a schema that lives within the system catalog. A system schema may contain information about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, - // This service is being previewed; hide from help output. - Hidden: true, + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start disable command -var disableReq catalog.DisableRequest -func init() { - Cmd.AddCommand(disableCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var disableOverrides []func( + *cobra.Command, + *catalog.DisableRequest, +) + +func newDisable() *cobra.Command { + cmd := &cobra.Command{} + + var disableReq catalog.DisableRequest + // TODO: short flags -} - -var disableCmd = &cobra.Command{ - Use: "disable METASTORE_ID SCHEMA_NAME", - Short: `Disable a system schema.`, - Long: `Disable a system schema. + cmd.Use = "disable METASTORE_ID SCHEMA_NAME" + cmd.Short = `Disable a system schema.` + cmd.Long = `Disable a system schema. Disables the system schema and removes it from the system catalog. 
The caller - must be an account admin or a metastore admin.`, + must be an account admin or a metastore admin.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -63,36 +85,58 @@ var disableCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range disableOverrides { + fn(cmd, &disableReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDisable()) + }) } // start enable command -var enableReq catalog.EnableRequest -func init() { - Cmd.AddCommand(enableCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var enableOverrides []func( + *cobra.Command, + *catalog.EnableRequest, +) + +func newEnable() *cobra.Command { + cmd := &cobra.Command{} + + var enableReq catalog.EnableRequest + // TODO: short flags -} - -var enableCmd = &cobra.Command{ - Use: "enable METASTORE_ID SCHEMA_NAME", - Short: `Enable a system schema.`, - Long: `Enable a system schema. + cmd.Use = "enable METASTORE_ID SCHEMA_NAME" + cmd.Short = `Enable a system schema.` + cmd.Long = `Enable a system schema. Enables the system schema and adds it to the system catalog. The caller must - be an account admin or a metastore admin.`, + be an account admin or a metastore admin.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -107,36 +151,58 @@ var enableCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range enableOverrides { + fn(cmd, &enableReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEnable()) + }) } // start list command -var listReq catalog.ListSystemSchemasRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *catalog.ListSystemSchemasRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListSystemSchemasRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list METASTORE_ID", - Short: `List system schemas.`, - Long: `List system schemas. + cmd.Use = "list METASTORE_ID" + cmd.Short = `List system schemas.` + cmd.Long = `List system schemas. Gets an array of system schemas for a metastore. The caller must be an account - admin or a metastore admin.`, + admin or a metastore admin.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -147,10 +213,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service SystemSchemas diff --git a/cmd/workspace/table-constraints/table-constraints.go b/cmd/workspace/table-constraints/table-constraints.go index d9588b8fc..023846a65 100755 --- a/cmd/workspace/table-constraints/table-constraints.go +++ b/cmd/workspace/table-constraints/table-constraints.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "table-constraints", - Short: `Primary key and foreign key constraints encode relationships between fields in tables.`, - Long: `Primary key and foreign key constraints encode relationships between fields in +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "table-constraints", + Short: `Primary key and foreign key constraints encode relationships between fields in tables.`, + Long: `Primary key and foreign key constraints encode relationships between fields in tables. Primary and foreign keys are informational only and are not enforced. Foreign @@ -28,26 +33,41 @@ var Cmd = &cobra.Command{ You can declare primary keys and foreign keys as part of the table specification during table creation. You can also add or drop constraints on existing tables.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateTableConstraint -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateTableConstraint, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateTableConstraint + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a table constraint.`, - Long: `Create a table constraint. + cmd.Use = "create" + cmd.Short = `Create a table constraint.` + cmd.Long = `Create a table constraint. Creates a new table constraint. @@ -58,11 +78,12 @@ var createCmd = &cobra.Command{ __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the owner of the referenced parent - table.`, + table.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -80,25 +101,45 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteTableConstraintRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteTableConstraintRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteTableConstraintRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete FULL_NAME CONSTRAINT_NAME CASCADE", - Short: `Delete a table constraint.`, - Long: `Delete a table constraint. + cmd.Use = "delete FULL_NAME CONSTRAINT_NAME CASCADE" + cmd.Short = `Delete a table constraint.` + cmd.Long = `Delete a table constraint. Deletes a table constraint. @@ -108,15 +149,17 @@ var deleteCmd = &cobra.Command{ schema, and be the owner of the table. 
- if __cascade__ argument is **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG** privilege on the table's catalog, the **USE_SCHEMA** privilege - on the table's schema, and be the owner of the table.`, + on the table's schema, and be the owner of the table.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -132,10 +175,24 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // end service TableConstraints diff --git a/cmd/workspace/tables/overrides.go b/cmd/workspace/tables/overrides.go index ed9c86ed5..35fc351a4 100644 --- a/cmd/workspace/tables/overrides.go +++ b/cmd/workspace/tables/overrides.go @@ -1,10 +1,18 @@ package tables -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *catalog.ListTablesRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Full Name"}} {{header "Table Type"}} {{range .}}{{.FullName|green}} {{blue "%s" .TableType}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/tables/tables.go b/cmd/workspace/tables/tables.go index 9ff653b99..53a153fcf 100755 --- a/cmd/workspace/tables/tables.go +++ b/cmd/workspace/tables/tables.go @@ -11,10 +11,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "tables", - Short: `A table resides in the third layer of Unity Catalog’s three-level namespace.`, - Long: `A table resides in the third layer of Unity Catalog’s three-level namespace. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "tables", + Short: `A table resides in the third layer of Unity Catalog’s three-level namespace.`, + Long: `A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows of data. To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the schema, and they must have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT @@ -23,34 +28,50 @@ var Cmd = &cobra.Command{ A table can be managed or external. 
From an API perspective, a __VIEW__ is a particular kind of table (rather than a managed or external table).`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start delete command -var deleteReq catalog.DeleteTableRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteTableRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteTableRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete FULL_NAME", - Short: `Delete a table.`, - Long: `Delete a table. + cmd.Use = "delete FULL_NAME" + cmd.Short = `Delete a table.` + cmd.Long = `Delete a table. Deletes a table from the specified parent catalog and schema. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the - **USE_SCHEMA** privilege on the parent schema.`, + **USE_SCHEMA** privilege on the parent schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -78,37 +99,58 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetTableRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetTableRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetTableRequest + // TODO: short flags - getCmd.Flags().BoolVar(&getReq.IncludeDeltaMetadata, "include-delta-metadata", getReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) + cmd.Flags().BoolVar(&getReq.IncludeDeltaMetadata, "include-delta-metadata", getReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) -} - -var getCmd = &cobra.Command{ - Use: "get FULL_NAME", - Short: `Get a table.`, - Long: `Get a table. + cmd.Use = "get FULL_NAME" + cmd.Short = `Get a table.` + cmd.Long = `Get a table. Gets a table from the metastore for a specific catalog and schema. 
The caller must be a metastore admin, be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, or be the owner of the table and have the - **SELECT** privilege on it as well.`, + **SELECT** privilege on it as well.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -136,44 +178,66 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListTablesRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListTablesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListTablesRequest + // TODO: short flags - listCmd.Flags().BoolVar(&listReq.IncludeDeltaMetadata, "include-delta-metadata", listReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) - listCmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of tables to return (page length).`) - listCmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque token to send for the next page of results (pagination).`) + cmd.Flags().BoolVar(&listReq.IncludeDeltaMetadata, "include-delta-metadata", listReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of tables to return (page length).`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque token to send for the next page of results (pagination).`) -} - -var listCmd = &cobra.Command{ - Use: "list CATALOG_NAME SCHEMA_NAME", - Short: `List tables.`, - Long: `List tables. + cmd.Use = "list CATALOG_NAME SCHEMA_NAME" + cmd.Short = `List tables.` + cmd.Long = `List tables. Gets an array of all tables for the current metastore under the parent catalog and schema. The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
There is no guarantee of a - specific ordering of the elements in the array.`, + specific ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -185,30 +249,50 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start list-summaries command -var listSummariesReq catalog.ListSummariesRequest -func init() { - Cmd.AddCommand(listSummariesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listSummariesOverrides []func( + *cobra.Command, + *catalog.ListSummariesRequest, +) + +func newListSummaries() *cobra.Command { + cmd := &cobra.Command{} + + var listSummariesReq catalog.ListSummariesRequest + // TODO: short flags - listSummariesCmd.Flags().IntVar(&listSummariesReq.MaxResults, "max-results", listSummariesReq.MaxResults, `Maximum number of tables to return (page length).`) - listSummariesCmd.Flags().StringVar(&listSummariesReq.PageToken, "page-token", listSummariesReq.PageToken, `Opaque token to send for the next page of results (pagination).`) - listSummariesCmd.Flags().StringVar(&listSummariesReq.SchemaNamePattern, "schema-name-pattern", listSummariesReq.SchemaNamePattern, `A sql LIKE pattern (% and _) for schema names.`) - listSummariesCmd.Flags().StringVar(&listSummariesReq.TableNamePattern, "table-name-pattern", listSummariesReq.TableNamePattern, `A sql LIKE pattern (% and _) for table names.`) + cmd.Flags().IntVar(&listSummariesReq.MaxResults, "max-results", listSummariesReq.MaxResults, `Maximum number of tables to return (page length).`) + cmd.Flags().StringVar(&listSummariesReq.PageToken, "page-token", listSummariesReq.PageToken, `Opaque token to send for the next page of results (pagination).`) + cmd.Flags().StringVar(&listSummariesReq.SchemaNamePattern, "schema-name-pattern", listSummariesReq.SchemaNamePattern, `A sql LIKE pattern (% and _) for schema names.`) + cmd.Flags().StringVar(&listSummariesReq.TableNamePattern, "table-name-pattern", listSummariesReq.TableNamePattern, `A sql LIKE pattern (% and _) for table names.`) -} - -var listSummariesCmd = &cobra.Command{ - Use: "list-summaries CATALOG_NAME", - Short: `List table summaries.`, - Long: `List table summaries. + cmd.Use = "list-summaries CATALOG_NAME" + cmd.Short = `List table summaries.` + cmd.Long = `List table summaries. Gets an array of summaries for tables for a schema and catalog within the metastore. 
The table summaries returned are either: @@ -220,11 +304,12 @@ var listSummariesCmd = &cobra.Command{ or **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the **USE_CATALOG** privilege on the parent catalog. - There is no guarantee of a specific ordering of the elements in the array.`, + There is no guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -252,10 +337,106 @@ var listSummariesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listSummariesOverrides { + fn(cmd, &listSummariesReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListSummaries()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateTableRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateTableRequest + + // TODO: short flags + + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, ``) + + cmd.Use = "update FULL_NAME" + cmd.Short = `Update a table owner.` + cmd.Long = `Update a table owner. + + Change the owner of the table. The caller must be the owner of the parent + catalog, have the **USE_CATALOG** privilege on the parent catalog and be the + owner of the parent schema, or be the owner of the table and have the + **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + privilege on the parent schema.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." + names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Full name of the table") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have full name of the table") + } + updateReq.FullName = args[0] + + err = w.Tables.Update(ctx, updateReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Tables diff --git a/cmd/workspace/token-management/overrides.go b/cmd/workspace/token-management/overrides.go index 2070e2a2b..46967d37a 100644 --- a/cmd/workspace/token-management/overrides.go +++ b/cmd/workspace/token-management/overrides.go @@ -1,10 +1,18 @@ package token_management -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/settings" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *settings.ListTokenManagementRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Created By"}} {{header "Comment"}} {{range .}}{{.TokenId|green}} {{.CreatedByUsername|cyan}} {{.Comment|cyan}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/token-management/token-management.go b/cmd/workspace/token-management/token-management.go index b5cc542c1..b934e2640 100755 --- a/cmd/workspace/token-management/token-management.go +++ b/cmd/workspace/token-management/token-management.go @@ -12,47 +12,69 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "token-management", - Short: `Enables administrators to get all tokens and delete tokens for other users.`, - Long: `Enables administrators to get all tokens and delete tokens for other users. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "token-management", + Short: `Enables administrators to get all tokens and delete tokens for other users.`, + Long: `Enables administrators to get all tokens and delete tokens for other users. Admins can either get every token, get a specific token by ID, or get all tokens for a particular user.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create-obo-token command -var createOboTokenReq settings.CreateOboTokenRequest -var createOboTokenJson flags.JsonFlag -func init() { - Cmd.AddCommand(createOboTokenCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOboTokenOverrides []func( + *cobra.Command, + *settings.CreateOboTokenRequest, +) + +func newCreateOboToken() *cobra.Command { + cmd := &cobra.Command{} + + var createOboTokenReq settings.CreateOboTokenRequest + var createOboTokenJson flags.JsonFlag + // TODO: short flags - createOboTokenCmd.Flags().Var(&createOboTokenJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createOboTokenJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createOboTokenCmd.Flags().StringVar(&createOboTokenReq.Comment, "comment", createOboTokenReq.Comment, `Comment that describes the purpose of the token.`) + cmd.Flags().StringVar(&createOboTokenReq.Comment, "comment", createOboTokenReq.Comment, `Comment that describes the purpose of the token.`) -} - -var createOboTokenCmd = &cobra.Command{ - Use: "create-obo-token APPLICATION_ID LIFETIME_SECONDS", - Short: `Create on-behalf token.`, - Long: `Create on-behalf token. + cmd.Use = "create-obo-token APPLICATION_ID LIFETIME_SECONDS" + cmd.Short = `Create on-behalf token.` + cmd.Long = `Create on-behalf token. - Creates a token on behalf of a service principal.`, + Creates a token on behalf of a service principal.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -74,31 +96,52 @@ var createOboTokenCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOboTokenOverrides { + fn(cmd, &createOboTokenReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateOboToken()) + }) } // start delete command -var deleteReq settings.DeleteTokenManagementRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.DeleteTokenManagementRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteTokenManagementRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete TOKEN_ID", - Short: `Delete a token.`, - Long: `Delete a token. + cmd.Use = "delete TOKEN_ID" + cmd.Short = `Delete a token.` + cmd.Long = `Delete a token. 
- Deletes a token, specified by its ID.`, + Deletes a token, specified by its ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -126,31 +169,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq settings.GetTokenManagementRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *settings.GetTokenManagementRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq settings.GetTokenManagementRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get TOKEN_ID", - Short: `Get token info.`, - Long: `Get token info. + cmd.Use = "get TOKEN_ID" + cmd.Short = `Get token info.` + cmd.Long = `Get token info. - Gets information about a token, specified by its ID.`, + Gets information about a token, specified by its ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -178,43 +242,162 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "get-permission-levels" + cmd.Short = `Get token permission levels.` + cmd.Long = `Get token permission levels. 
+ + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.TokenManagement.GetPermissionLevels(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "get-permissions" + cmd.Short = `Get token permissions.` + cmd.Long = `Get token permissions. + + Gets the permissions of all tokens. Tokens can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.TokenManagement.GetPermissions(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start list command -var listReq settings.ListTokenManagementRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *settings.ListTokenManagementRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq settings.ListTokenManagementRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.CreatedById, "created-by-id", listReq.CreatedById, `User ID of the user that created the token.`) - listCmd.Flags().StringVar(&listReq.CreatedByUsername, "created-by-username", listReq.CreatedByUsername, `Username of the user that created the token.`) + cmd.Flags().StringVar(&listReq.CreatedById, "created-by-id", listReq.CreatedById, `User ID of the user that created the token.`) + cmd.Flags().StringVar(&listReq.CreatedByUsername, "created-by-username", listReq.CreatedByUsername, `Username of the user that created the token.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List all tokens.`, - Long: `List all tokens. + cmd.Use = "list" + cmd.Short = `List all tokens.` + cmd.Long = `List all tokens. - Lists all tokens associated with the specified workspace or user.`, + Lists all tokens associated with the specified workspace or user.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -231,10 +414,174 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *settings.TokenPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq settings.TokenPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions" + cmd.Short = `Set token permissions.` + cmd.Long = `Set token permissions. + + Sets permissions on all tokens. 
Tokens can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } else { + } + + response, err := w.TokenManagement.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *settings.TokenPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq settings.TokenPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions" + cmd.Short = `Update token permissions.` + cmd.Long = `Update token permissions. + + Updates the permissions on all tokens. Tokens can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } else { + } + + response, err := w.TokenManagement.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service TokenManagement diff --git a/cmd/workspace/tokens/overrides.go b/cmd/workspace/tokens/overrides.go index b5673c0e9..09c51758e 100644 --- a/cmd/workspace/tokens/overrides.go +++ b/cmd/workspace/tokens/overrides.go @@ -1,10 +1,17 @@ package tokens -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Expiry time"}} {{header "Comment"}} {{range .}}{{.TokenId|green}} {{cyan "%d" .ExpiryTime}} {{.Comment|cyan}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/tokens/tokens.go b/cmd/workspace/tokens/tokens.go index c121793b6..eee64c976 100755 --- a/cmd/workspace/tokens/tokens.go +++ b/cmd/workspace/tokens/tokens.go @@ -12,50 +12,72 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "tokens", - Short: `The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.`, - Long: `The Token API allows you to create, list, and revoke tokens that can be used +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "tokens", + Short: `The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.`, + Long: `The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq settings.CreateTokenRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *settings.CreateTokenRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq settings.CreateTokenRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Optional description to attach to the token.`) - createCmd.Flags().Int64Var(&createReq.LifetimeSeconds, "lifetime-seconds", createReq.LifetimeSeconds, `The lifetime of the token, in seconds.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Optional description to attach to the token.`) + cmd.Flags().Int64Var(&createReq.LifetimeSeconds, "lifetime-seconds", createReq.LifetimeSeconds, `The lifetime of the token, in seconds.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a user token.`, - Long: `Create a user token. + cmd.Use = "create" + cmd.Short = `Create a user token.` + cmd.Long = `Create a user token. Creates and returns a token for a user. If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns - an error **QUOTA_EXCEEDED**.`, + an error **QUOTA_EXCEEDED**.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -72,36 +94,57 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq settings.RevokeTokenRequest -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.RevokeTokenRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.RevokeTokenRequest + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete TOKEN_ID", - Short: `Revoke token.`, - Long: `Revoke token. 
+ cmd.Use = "delete TOKEN_ID" + cmd.Short = `Revoke token.` + cmd.Long = `Revoke token. Revokes an access token. If a token with the specified ID is not valid, this call returns an error - **RESOURCE_DOES_NOT_EXIST**.`, + **RESOURCE_DOES_NOT_EXIST**.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -136,29 +179,47 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List tokens.`, - Long: `List tokens. + cmd.Use = "list" + cmd.Short = `List tokens.` + cmd.Long = `List tokens. - Lists all the valid tokens for a user-workspace pair.`, + Lists all the valid tokens for a user-workspace pair.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Tokens.ListAll(ctx) @@ -166,10 +227,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service Tokens diff --git a/cmd/workspace/users/overrides.go b/cmd/workspace/users/overrides.go index 45447a0ae..a985ccf8c 100644 --- a/cmd/workspace/users/overrides.go +++ b/cmd/workspace/users/overrides.go @@ -1,10 +1,18 @@ package users -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListUsersRequest) { listReq.Attributes = "id,userName,groups,active" listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.UserName}} {{range .Groups}}{{.Display}} {{end}} {{if .Active}}{{"ACTIVE"|green}}{{else}}DISABLED{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/users/users.go b/cmd/workspace/users/users.go index 570a8f79c..b1a8b0572 100755 --- a/cmd/workspace/users/users.go +++ b/cmd/workspace/users/users.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "users", - Short: `User identities recognized by Databricks and represented by email addresses.`, - Long: `User identities recognized by Databricks and represented by email addresses. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "users", + Short: `User identities recognized by Databricks and represented by email addresses.`, + Long: `User identities recognized by Databricks and represented by email addresses. Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks workspace. SCIM @@ -26,51 +31,68 @@ var Cmd = &cobra.Command{ provider and that user’s account will also be removed from Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from accessing sensitive data.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.User -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.User, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.User + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: emails // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) // TODO: complex arg: name // TODO: array: roles - createCmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) + cmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new user.`, - Long: `Create a new user. + cmd.Use = "create" + cmd.Short = `Create a new user.` + cmd.Long = `Create a new user. Creates a new user in the Databricks workspace. This new user will also be - added to the Databricks account.`, + added to the Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -87,32 +109,53 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteUserRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *iam.DeleteUserRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteUserRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a user.`, - Long: `Delete a user. + cmd.Use = "delete ID" + cmd.Short = `Delete a user.` + cmd.Long = `Delete a user. Deletes a user. Deleting a user from a Databricks workspace also removes - objects associated with the user.`, + objects associated with the user.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -140,31 +183,52 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetUserRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetUserRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetUserRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get user details.`, - Long: `Get user details. + cmd.Use = "get ID" + cmd.Short = `Get user details.` + cmd.Long = `Get user details. - Gets information for a specific user in Databricks workspace.`, + Gets information for a specific user in Databricks workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -192,48 +256,167 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionLevelsOverrides []func( + *cobra.Command, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "get-permission-levels" + cmd.Short = `Get password permission levels.` + cmd.Long = `Get password permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.Users.GetPermissionLevels(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "get-permissions" + cmd.Short = `Get password permissions.` + cmd.Long = `Get password permissions. + + Gets the permissions of all passwords. Passwords can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.Users.GetPermissions(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start list command -var listReq iam.ListUsersRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *iam.ListUsersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListUsersRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List users.`, - Long: `List users. + cmd.Use = "list" + cmd.Short = `List users.` + cmd.Long = `List users. - Gets details for all the users associated with a Databricks workspace.`, + Gets details for all the users associated with a Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -250,36 +433,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schemas -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update user details.`, - Long: `Update user details. + cmd.Use = "patch ID" + cmd.Short = `Update user details.` + cmd.Long = `Update user details. Partially updates a user resource by applying the supplied operations on - specific user attributes.`, + specific user attributes.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -313,44 +518,140 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *iam.PasswordPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq iam.PasswordPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions" + cmd.Short = `Set password permissions.` + cmd.Long = `Set password permissions. + + Sets permissions on all passwords. 
Passwords can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } else { + } + + response, err := w.Users.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start update command -var updateReq iam.User -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.User, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.User + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: emails // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks user ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks user ID.`) // TODO: complex arg: name // TODO: array: roles - updateCmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) + cmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace a user.`, - Long: `Replace a user. + cmd.Use = "update ID" + cmd.Short = `Replace a user.` + cmd.Long = `Replace a user. 
- Replaces a user's information with the data supplied in request.`, + Replaces a user's information with the data supplied in request.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -385,10 +686,99 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *iam.PasswordPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq iam.PasswordPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions" + cmd.Short = `Update password permissions.` + cmd.Long = `Update password permissions. + + Updates the permissions on all passwords. Passwords can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } else { + } + + response, err := w.Users.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service Users diff --git a/cmd/workspace/volumes/volumes.go b/cmd/workspace/volumes/volumes.go index e020700aa..4dbfc5856 100755 --- a/cmd/workspace/volumes/volumes.go +++ b/cmd/workspace/volumes/volumes.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "volumes", - Short: `Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.`, - Long: `Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "volumes", + Short: `Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.`, + Long: `Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF files, organizing data sets during the data exploration stages in data science, working with @@ -23,32 +28,44 @@ var Cmd = &cobra.Command{ storing library and config files of arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or transforming and querying non-tabular data files in ETL.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } - // This service is being previewed; hide from help output. - Hidden: true, + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateVolumeRequestContent -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateVolumeRequestContent, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateVolumeRequestContent + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `The comment attached to the volume.`) - createCmd.Flags().StringVar(&createReq.StorageLocation, "storage-location", createReq.StorageLocation, `The storage location on the cloud.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `The comment attached to the volume.`) + cmd.Flags().StringVar(&createReq.StorageLocation, "storage-location", createReq.StorageLocation, `The storage location on the cloud.`) -} - -var createCmd = &cobra.Command{ - Use: "create CATALOG_NAME NAME SCHEMA_NAME VOLUME_TYPE", - Short: `Create a Volume.`, - Long: `Create a Volume. 
+ cmd.Use = "create CATALOG_NAME SCHEMA_NAME NAME VOLUME_TYPE" + cmd.Short = `Create a Volume.` + cmd.Long = `Create a Volume. Creates a new volume. @@ -67,18 +84,20 @@ var createCmd = &cobra.Command{ must have **CREATE EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes existing in the specified storage location. - The specified storage location is not under the location of other - tables, nor volumes, or catalogs or schemas.`, + tables, nor volumes, or catalogs or schemas.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -89,8 +108,8 @@ var createCmd = &cobra.Command{ } } else { createReq.CatalogName = args[0] - createReq.Name = args[1] - createReq.SchemaName = args[2] + createReq.SchemaName = args[1] + createReq.Name = args[2] _, err = fmt.Sscan(args[3], &createReq.VolumeType) if err != nil { return fmt.Errorf("invalid VOLUME_TYPE: %s", args[3]) @@ -102,35 +121,56 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteVolumeRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteVolumeRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteVolumeRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete FULL_NAME_ARG", - Short: `Delete a Volume.`, - Long: `Delete a Volume. + cmd.Use = "delete FULL_NAME_ARG" + cmd.Short = `Delete a Volume.` + cmd.Long = `Delete a Volume. Deletes a volume from the specified parent catalog and schema. The caller must be a metastore admin or an owner of the volume. 
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege - on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.`, + on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -158,25 +198,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start list command -var listReq catalog.ListVolumesRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListVolumesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListVolumesRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list CATALOG_NAME SCHEMA_NAME", - Short: `List Volumes.`, - Long: `List Volumes. + cmd.Use = "list CATALOG_NAME SCHEMA_NAME" + cmd.Short = `List Volumes.` + cmd.Long = `List Volumes. Gets an array of all volumes for the current metastore under the parent catalog and schema. @@ -188,15 +248,20 @@ var listCmd = &cobra.Command{ also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - There is no guarantee of a specific ordering of the elements in the array.`, + There is no guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -208,36 +273,57 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start read command -var readReq catalog.ReadVolumeRequest -func init() { - Cmd.AddCommand(readCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var readOverrides []func( + *cobra.Command, + *catalog.ReadVolumeRequest, +) + +func newRead() *cobra.Command { + cmd := &cobra.Command{} + + var readReq catalog.ReadVolumeRequest + // TODO: short flags -} - -var readCmd = &cobra.Command{ - Use: "read FULL_NAME_ARG", - Short: `Get a Volume.`, - Long: `Get a Volume. + cmd.Use = "read FULL_NAME_ARG" + cmd.Short = `Get a Volume.` + cmd.Long = `Get a Volume. Gets a volume from the metastore for a specific catalog and schema. The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and - the **USE_SCHEMA** privilege on the parent schema.`, + the **USE_SCHEMA** privilege on the parent schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -265,29 +351,49 @@ var readCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range readOverrides { + fn(cmd, &readReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRead()) + }) } // start update command -var updateReq catalog.UpdateVolumeRequestContent -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateVolumeRequestContent, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateVolumeRequestContent + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `The comment attached to the volume.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the volume.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The identifier of the user who owns the volume.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `The comment attached to the volume.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the volume.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The identifier of the user who owns the volume.`) -} - -var updateCmd = &cobra.Command{ - Use: "update FULL_NAME_ARG", - Short: `Update a Volume.`, - Long: `Update a Volume. 
+ cmd.Use = "update FULL_NAME_ARG" + cmd.Short = `Update a Volume.` + cmd.Long = `Update a Volume. Updates the specified volume under the specified parent catalog and schema. @@ -296,11 +402,12 @@ var updateCmd = &cobra.Command{ on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. Currently only the name, the owner or the comment of the volume could be - updated.`, + updated.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -328,10 +435,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Volumes diff --git a/cmd/workspace/warehouses/overrides.go b/cmd/workspace/warehouses/overrides.go index 82319d6fc..0714937c2 100644 --- a/cmd/workspace/warehouses/overrides.go +++ b/cmd/workspace/warehouses/overrides.go @@ -1,10 +1,18 @@ package warehouses -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *sql.ListWarehousesRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{header "Size"}} {{header "State"}} {{range .}}{{.Id|green}} {{.Name|cyan}} {{.ClusterSize|cyan}} {{if eq .State "RUNNING"}}{{"RUNNING"|green}}{{else if eq .State "STOPPED"}}{{"STOPPED"|red}}{{else}}{{blue "%s" .State}}{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/warehouses/warehouses.go b/cmd/workspace/warehouses/warehouses.go index a29c4031e..c64e0e0b5 100755 --- a/cmd/workspace/warehouses/warehouses.go +++ b/cmd/workspace/warehouses/warehouses.go @@ -13,65 +13,86 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "warehouses", - Short: `A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.`, - Long: `A SQL warehouse is a compute resource that lets you run SQL commands on data +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "warehouses", + Short: `A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.`, + Long: `A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL. 
Compute resources are infrastructure resources that provide processing capabilities in the cloud.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sql.CreateWarehouseRequest -var createJson flags.JsonFlag -var createSkipWait bool -var createTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.CreateWarehouseRequest, +) -func init() { - Cmd.AddCommand(createCmd) +func newCreate() *cobra.Command { + cmd := &cobra.Command{} - createCmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) - createCmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var createReq sql.CreateWarehouseRequest + var createJson flags.JsonFlag + + var createSkipWait bool + var createTimeout time.Duration + + cmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().IntVar(&createReq.AutoStopMins, "auto-stop-mins", createReq.AutoStopMins, `The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped.`) + cmd.Flags().IntVar(&createReq.AutoStopMins, "auto-stop-mins", createReq.AutoStopMins, `The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped.`) // TODO: complex arg: channel - createCmd.Flags().StringVar(&createReq.ClusterSize, "cluster-size", createReq.ClusterSize, `Size of the clusters allocated for this warehouse.`) - createCmd.Flags().StringVar(&createReq.CreatorName, "creator-name", createReq.CreatorName, `warehouse creator name.`) - createCmd.Flags().BoolVar(&createReq.EnablePhoton, "enable-photon", createReq.EnablePhoton, `Configures whether the warehouse should use Photon optimized clusters.`) - createCmd.Flags().BoolVar(&createReq.EnableServerlessCompute, "enable-serverless-compute", createReq.EnableServerlessCompute, `Configures whether the warehouse should use serverless compute.`) - createCmd.Flags().StringVar(&createReq.InstanceProfileArn, "instance-profile-arn", createReq.InstanceProfileArn, `Deprecated.`) - createCmd.Flags().IntVar(&createReq.MaxNumClusters, "max-num-clusters", createReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) - createCmd.Flags().IntVar(&createReq.MinNumClusters, "min-num-clusters", createReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Logical name for the cluster.`) - createCmd.Flags().Var(&createReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot 
instances.`) + cmd.Flags().StringVar(&createReq.ClusterSize, "cluster-size", createReq.ClusterSize, `Size of the clusters allocated for this warehouse.`) + cmd.Flags().StringVar(&createReq.CreatorName, "creator-name", createReq.CreatorName, `warehouse creator name.`) + cmd.Flags().BoolVar(&createReq.EnablePhoton, "enable-photon", createReq.EnablePhoton, `Configures whether the warehouse should use Photon optimized clusters.`) + cmd.Flags().BoolVar(&createReq.EnableServerlessCompute, "enable-serverless-compute", createReq.EnableServerlessCompute, `Configures whether the warehouse should use serverless compute.`) + cmd.Flags().StringVar(&createReq.InstanceProfileArn, "instance-profile-arn", createReq.InstanceProfileArn, `Deprecated.`) + cmd.Flags().IntVar(&createReq.MaxNumClusters, "max-num-clusters", createReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) + cmd.Flags().IntVar(&createReq.MinNumClusters, "min-num-clusters", createReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) + cmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Logical name for the cluster.`) + cmd.Flags().Var(&createReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot instances.`) // TODO: complex arg: tags - createCmd.Flags().Var(&createReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC.`) + cmd.Flags().Var(&createReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a warehouse.`, - Long: `Create a warehouse. + cmd.Use = "create" + cmd.Short = `Create a warehouse.` + cmd.Long = `Create a warehouse. - Creates a new SQL warehouse.`, + Creates a new SQL warehouse.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -107,31 +128,52 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sql.DeleteWarehouseRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
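The deleteOverrides slice declared just below follows the same convention as the curated listOverride in cmd/workspace/warehouses/overrides.go earlier in this change: a hand-written file in the same package appends a function from init(), and the generated constructor applies it after the defaults are wired up. A minimal sketch of such a curated override for the delete command (the deleteOverride function and its Example text are hypothetical, not code taken from this change):

package warehouses

import (
	"github.com/databricks/databricks-sdk-go/service/sql"
	"github.com/spf13/cobra"
)

// deleteOverride is a hypothetical curated override for the generated delete
// command. It only adjusts help output; the request pointer is available for
// overrides that need to pre-populate or validate fields before RunE executes.
func deleteOverride(deleteCmd *cobra.Command, deleteReq *sql.DeleteWarehouseRequest) {
	deleteCmd.Example = "  databricks warehouses delete 1234567890abcdef"
}

func init() {
	deleteOverrides = append(deleteOverrides, deleteOverride)
}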
+var deleteOverrides []func( + *cobra.Command, + *sql.DeleteWarehouseRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteWarehouseRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a warehouse.`, - Long: `Delete a warehouse. + cmd.Use = "delete ID" + cmd.Short = `Delete a warehouse.` + cmd.Long = `Delete a warehouse. - Deletes a SQL warehouse.`, + Deletes a SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -159,53 +201,73 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start edit command -var editReq sql.EditWarehouseRequest -var editJson flags.JsonFlag -var editSkipWait bool -var editTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var editOverrides []func( + *cobra.Command, + *sql.EditWarehouseRequest, +) -func init() { - Cmd.AddCommand(editCmd) +func newEdit() *cobra.Command { + cmd := &cobra.Command{} - editCmd.Flags().BoolVar(&editSkipWait, "no-wait", editSkipWait, `do not wait to reach RUNNING state`) - editCmd.Flags().DurationVar(&editTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var editReq sql.EditWarehouseRequest + var editJson flags.JsonFlag + + var editSkipWait bool + var editTimeout time.Duration + + cmd.Flags().BoolVar(&editSkipWait, "no-wait", editSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&editTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) - editCmd.Flags().IntVar(&editReq.AutoStopMins, "auto-stop-mins", editReq.AutoStopMins, `The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped.`) + cmd.Flags().IntVar(&editReq.AutoStopMins, "auto-stop-mins", editReq.AutoStopMins, `The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped.`) // TODO: complex arg: channel - editCmd.Flags().StringVar(&editReq.ClusterSize, "cluster-size", editReq.ClusterSize, `Size of the clusters allocated for this warehouse.`) - editCmd.Flags().StringVar(&editReq.CreatorName, "creator-name", editReq.CreatorName, `warehouse creator name.`) - editCmd.Flags().BoolVar(&editReq.EnablePhoton, "enable-photon", editReq.EnablePhoton, `Configures whether the warehouse should use Photon optimized clusters.`) - 
editCmd.Flags().BoolVar(&editReq.EnableServerlessCompute, "enable-serverless-compute", editReq.EnableServerlessCompute, `Configures whether the warehouse should use serverless compute.`) - editCmd.Flags().StringVar(&editReq.InstanceProfileArn, "instance-profile-arn", editReq.InstanceProfileArn, `Deprecated.`) - editCmd.Flags().IntVar(&editReq.MaxNumClusters, "max-num-clusters", editReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) - editCmd.Flags().IntVar(&editReq.MinNumClusters, "min-num-clusters", editReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) - editCmd.Flags().StringVar(&editReq.Name, "name", editReq.Name, `Logical name for the cluster.`) - editCmd.Flags().Var(&editReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot instances.`) + cmd.Flags().StringVar(&editReq.ClusterSize, "cluster-size", editReq.ClusterSize, `Size of the clusters allocated for this warehouse.`) + cmd.Flags().StringVar(&editReq.CreatorName, "creator-name", editReq.CreatorName, `warehouse creator name.`) + cmd.Flags().BoolVar(&editReq.EnablePhoton, "enable-photon", editReq.EnablePhoton, `Configures whether the warehouse should use Photon optimized clusters.`) + cmd.Flags().BoolVar(&editReq.EnableServerlessCompute, "enable-serverless-compute", editReq.EnableServerlessCompute, `Configures whether the warehouse should use serverless compute.`) + cmd.Flags().StringVar(&editReq.InstanceProfileArn, "instance-profile-arn", editReq.InstanceProfileArn, `Deprecated.`) + cmd.Flags().IntVar(&editReq.MaxNumClusters, "max-num-clusters", editReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) + cmd.Flags().IntVar(&editReq.MinNumClusters, "min-num-clusters", editReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) + cmd.Flags().StringVar(&editReq.Name, "name", editReq.Name, `Logical name for the cluster.`) + cmd.Flags().Var(&editReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot instances.`) // TODO: complex arg: tags - editCmd.Flags().Var(&editReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC.`) + cmd.Flags().Var(&editReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC.`) -} - -var editCmd = &cobra.Command{ - Use: "edit ID", - Short: `Update a warehouse.`, - Long: `Update a warehouse. + cmd.Use = "edit ID" + cmd.Short = `Update a warehouse.` + cmd.Long = `Update a warehouse. - Updates the configuration for a SQL warehouse.`, + Updates the configuration for a SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -258,37 +320,57 @@ var editCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start get command -var getReq sql.GetWarehouseRequest -var getSkipWait bool -var getTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sql.GetWarehouseRequest, +) -func init() { - Cmd.AddCommand(getCmd) +func newGet() *cobra.Command { + cmd := &cobra.Command{} - getCmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) - getCmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var getReq sql.GetWarehouseRequest + + var getSkipWait bool + var getTimeout time.Duration + + cmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get warehouse info.`, - Long: `Get warehouse info. + cmd.Use = "get ID" + cmd.Short = `Get warehouse info.` + cmd.Long = `Get warehouse info. - Gets the information for a single SQL warehouse.`, + Gets the information for a single SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -316,30 +398,195 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *sql.GetWarehousePermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq sql.GetWarehousePermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels WAREHOUSE_ID" + cmd.Short = `Get SQL warehouse permission levels.` + cmd.Long = `Get SQL warehouse permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WAREHOUSE_ID argument specified. Loading names for Warehouses drop-down." 
+ names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The SQL warehouse for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the sql warehouse for which to get or manage permissions") + } + getPermissionLevelsReq.WarehouseId = args[0] + + response, err := w.Warehouses.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *sql.GetWarehousePermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq sql.GetWarehousePermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions WAREHOUSE_ID" + cmd.Short = `Get SQL warehouse permissions.` + cmd.Long = `Get SQL warehouse permissions. + + Gets the permissions of a SQL warehouse. SQL warehouses can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WAREHOUSE_ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The SQL warehouse for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the sql warehouse for which to get or manage permissions") + } + getPermissionsReq.WarehouseId = args[0] + + response, err := w.Warehouses.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start get-workspace-warehouse-config command -func init() { - Cmd.AddCommand(getWorkspaceWarehouseConfigCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getWorkspaceWarehouseConfigOverrides []func( + *cobra.Command, +) -} +func newGetWorkspaceWarehouseConfig() *cobra.Command { + cmd := &cobra.Command{} -var getWorkspaceWarehouseConfigCmd = &cobra.Command{ - Use: "get-workspace-warehouse-config", - Short: `Get the workspace configuration.`, - Long: `Get the workspace configuration. + cmd.Use = "get-workspace-warehouse-config" + cmd.Short = `Get the workspace configuration.` + cmd.Long = `Get the workspace configuration. Gets the workspace level configuration that is shared by all SQL warehouses in - a workspace.`, + a workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Warehouses.GetWorkspaceWarehouseConfig(ctx) @@ -347,42 +594,64 @@ var getWorkspaceWarehouseConfigCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getWorkspaceWarehouseConfigOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetWorkspaceWarehouseConfig()) + }) } // start list command -var listReq sql.ListWarehousesRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *sql.ListWarehousesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sql.ListWarehousesRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().IntVar(&listReq.RunAsUserId, "run-as-user-id", listReq.RunAsUserId, `Service Principal which will be used to fetch the list of warehouses.`) + cmd.Flags().IntVar(&listReq.RunAsUserId, "run-as-user-id", listReq.RunAsUserId, `Service Principal which will be used to fetch the list of warehouses.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List warehouses.`, - Long: `List warehouses. + cmd.Use = "list" + cmd.Short = `List warehouses.` + cmd.Long = `List warehouses. 
- Lists all SQL warehouses that a user has manager permissions on.`, + Lists all SQL warehouses that a user has manager permissions on.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -399,51 +668,157 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *sql.WarehousePermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq sql.WarehousePermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions WAREHOUSE_ID" + cmd.Short = `Set SQL warehouse permissions.` + cmd.Long = `Set SQL warehouse permissions. + + Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WAREHOUSE_ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The SQL warehouse for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the sql warehouse for which to get or manage permissions") + } + setPermissionsReq.WarehouseId = args[0] + + response, err := w.Warehouses.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
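Completions are disabled by default and, per the comment above, can be swapped out from override.go. A hedged sketch of a completion override for set-permissions, using only the setPermissionsOverrides hook defined in this file and the standard cobra completion signature (the override body itself is hypothetical):

package warehouses

import (
	"github.com/databricks/databricks-sdk-go/service/sql"
	"github.com/spf13/cobra"
)

func init() {
	setPermissionsOverrides = append(setPermissionsOverrides, func(cmd *cobra.Command, req *sql.WarehousePermissionsRequest) {
		// Replace the generated cobra.NoFileCompletions default. A real
		// override might query the Warehouses API for candidate IDs; this
		// sketch only keeps file completion disabled for WAREHOUSE_ID.
		cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			return nil, cobra.ShellCompDirectiveNoFileComp
		}
	})
}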
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) } // start set-workspace-warehouse-config command -var setWorkspaceWarehouseConfigReq sql.SetWorkspaceWarehouseConfigRequest -var setWorkspaceWarehouseConfigJson flags.JsonFlag -func init() { - Cmd.AddCommand(setWorkspaceWarehouseConfigCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setWorkspaceWarehouseConfigOverrides []func( + *cobra.Command, + *sql.SetWorkspaceWarehouseConfigRequest, +) + +func newSetWorkspaceWarehouseConfig() *cobra.Command { + cmd := &cobra.Command{} + + var setWorkspaceWarehouseConfigReq sql.SetWorkspaceWarehouseConfigRequest + var setWorkspaceWarehouseConfigJson flags.JsonFlag + // TODO: short flags - setWorkspaceWarehouseConfigCmd.Flags().Var(&setWorkspaceWarehouseConfigJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setWorkspaceWarehouseConfigJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: channel // TODO: complex arg: config_param // TODO: array: data_access_config // TODO: array: enabled_warehouse_types // TODO: complex arg: global_param - setWorkspaceWarehouseConfigCmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.GoogleServiceAccount, "google-service-account", setWorkspaceWarehouseConfigReq.GoogleServiceAccount, `GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage.`) - setWorkspaceWarehouseConfigCmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.InstanceProfileArn, "instance-profile-arn", setWorkspaceWarehouseConfigReq.InstanceProfileArn, `AWS Only: Instance profile used to pass IAM role to the cluster.`) - setWorkspaceWarehouseConfigCmd.Flags().Var(&setWorkspaceWarehouseConfigReq.SecurityPolicy, "security-policy", `Security policy for warehouses.`) + cmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.GoogleServiceAccount, "google-service-account", setWorkspaceWarehouseConfigReq.GoogleServiceAccount, `GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage.`) + cmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.InstanceProfileArn, "instance-profile-arn", setWorkspaceWarehouseConfigReq.InstanceProfileArn, `AWS Only: Instance profile used to pass IAM role to the cluster.`) + cmd.Flags().Var(&setWorkspaceWarehouseConfigReq.SecurityPolicy, "security-policy", `Security policy for warehouses.`) // TODO: complex arg: sql_configuration_parameters -} - -var setWorkspaceWarehouseConfigCmd = &cobra.Command{ - Use: "set-workspace-warehouse-config", - Short: `Set the workspace configuration.`, - Long: `Set the workspace configuration. + cmd.Use = "set-workspace-warehouse-config" + cmd.Short = `Set the workspace configuration.` + cmd.Long = `Set the workspace configuration. 
Sets the workspace level configuration that is shared by all SQL warehouses in - a workspace.`, + a workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -460,37 +835,57 @@ var setWorkspaceWarehouseConfigCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setWorkspaceWarehouseConfigOverrides { + fn(cmd, &setWorkspaceWarehouseConfigReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetWorkspaceWarehouseConfig()) + }) } // start start command -var startReq sql.StartRequest -var startSkipWait bool -var startTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var startOverrides []func( + *cobra.Command, + *sql.StartRequest, +) -func init() { - Cmd.AddCommand(startCmd) +func newStart() *cobra.Command { + cmd := &cobra.Command{} - startCmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach RUNNING state`) - startCmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var startReq sql.StartRequest + + var startSkipWait bool + var startTimeout time.Duration + + cmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} - -var startCmd = &cobra.Command{ - Use: "start ID", - Short: `Start a warehouse.`, - Long: `Start a warehouse. + cmd.Use = "start ID" + cmd.Short = `Start a warehouse.` + cmd.Long = `Start a warehouse. - Starts a SQL warehouse.`, + Starts a SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -537,37 +932,57 @@ var startCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range startOverrides { + fn(cmd, &startReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStart()) + }) } // start stop command -var stopReq sql.StopRequest -var stopSkipWait bool -var stopTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var stopOverrides []func( + *cobra.Command, + *sql.StopRequest, +) -func init() { - Cmd.AddCommand(stopCmd) +func newStop() *cobra.Command { + cmd := &cobra.Command{} - stopCmd.Flags().BoolVar(&stopSkipWait, "no-wait", stopSkipWait, `do not wait to reach STOPPED state`) - stopCmd.Flags().DurationVar(&stopTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach STOPPED state`) + var stopReq sql.StopRequest + + var stopSkipWait bool + var stopTimeout time.Duration + + cmd.Flags().BoolVar(&stopSkipWait, "no-wait", stopSkipWait, `do not wait to reach STOPPED state`) + cmd.Flags().DurationVar(&stopTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach STOPPED state`) // TODO: short flags -} - -var stopCmd = &cobra.Command{ - Use: "stop ID", - Short: `Stop a warehouse.`, - Long: `Stop a warehouse. + cmd.Use = "stop ID" + cmd.Short = `Stop a warehouse.` + cmd.Long = `Stop a warehouse. - Stops a SQL warehouse.`, + Stops a SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -614,10 +1029,108 @@ var stopCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range stopOverrides { + fn(cmd, &stopReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStop()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *sql.WarehousePermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq sql.WarehousePermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions WAREHOUSE_ID" + cmd.Short = `Update SQL warehouse permissions.` + cmd.Long = `Update SQL warehouse permissions. + + Updates the permissions on a SQL warehouse. 
SQL warehouses can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WAREHOUSE_ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The SQL warehouse for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the sql warehouse for which to get or manage permissions") + } + updatePermissionsReq.WarehouseId = args[0] + + response, err := w.Warehouses.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service Warehouses diff --git a/cmd/workspace/workspace-bindings/workspace-bindings.go b/cmd/workspace/workspace-bindings/workspace-bindings.go index 8780106b1..3d7fa677c 100755 --- a/cmd/workspace/workspace-bindings/workspace-bindings.go +++ b/cmd/workspace/workspace-bindings/workspace-bindings.go @@ -10,44 +10,66 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspace-bindings", - Short: `A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__.`, - Long: `A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__. An +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspace-bindings", + Short: `A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__.`, + Long: `A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ catalog can be accessed from any workspace, while an __ISOLATED__ catalog can only be access from a configured list of workspaces. A catalog's workspace bindings can be configured by a metastore admin or the owner of the catalog.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get command -var getReq catalog.GetWorkspaceBindingRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetWorkspaceBindingRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetWorkspaceBindingRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get catalog workspace bindings.`, - Long: `Get catalog workspace bindings. + cmd.Use = "get NAME" + cmd.Short = `Get catalog workspace bindings.` + cmd.Long = `Get catalog workspace bindings. Gets workspace bindings of the catalog. The caller must be a metastore admin - or an owner of the catalog.`, + or an owner of the catalog.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -58,41 +80,63 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start update command -var updateReq catalog.UpdateWorkspaceBindings -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateWorkspaceBindings, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateWorkspaceBindings + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: assign_workspaces // TODO: array: unassign_workspaces -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update catalog workspace bindings.`, - Long: `Update catalog workspace bindings. + cmd.Use = "update NAME" + cmd.Short = `Update catalog workspace bindings.` + cmd.Long = `Update catalog workspace bindings. Updates workspace bindings of the catalog. 
The caller must be a metastore - admin or an owner of the catalog.`, + admin or an owner of the catalog.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -109,10 +153,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service WorkspaceBindings diff --git a/cmd/workspace/workspace-conf/workspace-conf.go b/cmd/workspace/workspace-conf/workspace-conf.go index f2f0bb759..d828f66ea 100755 --- a/cmd/workspace/workspace-conf/workspace-conf.go +++ b/cmd/workspace/workspace-conf/workspace-conf.go @@ -10,38 +10,60 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspace-conf", - Short: `This API allows updating known workspace settings for advanced users.`, - Long: `This API allows updating known workspace settings for advanced users.`, - Annotations: map[string]string{ - "package": "settings", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspace-conf", + Short: `This API allows updating known workspace settings for advanced users.`, + Long: `This API allows updating known workspace settings for advanced users.`, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get-status command -var getStatusReq settings.GetStatusRequest -func init() { - Cmd.AddCommand(getStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getStatusOverrides []func( + *cobra.Command, + *settings.GetStatusRequest, +) + +func newGetStatus() *cobra.Command { + cmd := &cobra.Command{} + + var getStatusReq settings.GetStatusRequest + // TODO: short flags -} - -var getStatusCmd = &cobra.Command{ - Use: "get-status KEYS", - Short: `Check configuration status.`, - Long: `Check configuration status. + cmd.Use = "get-status KEYS" + cmd.Short = `Check configuration status.` + cmd.Long = `Check configuration status. 
- Gets the configuration status for a workspace.`, + Gets the configuration status for a workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -52,41 +74,63 @@ var getStatusCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getStatusOverrides { + fn(cmd, &getStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetStatus()) + }) } // start set-status command -var setStatusReq settings.WorkspaceConf -var setStatusJson flags.JsonFlag -func init() { - Cmd.AddCommand(setStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setStatusOverrides []func( + *cobra.Command, + *settings.WorkspaceConf, +) + +func newSetStatus() *cobra.Command { + cmd := &cobra.Command{} + + var setStatusReq settings.WorkspaceConf + var setStatusJson flags.JsonFlag + // TODO: short flags - setStatusCmd.Flags().Var(&setStatusJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setStatusJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var setStatusCmd = &cobra.Command{ - Use: "set-status", - Short: `Enable/disable features.`, - Long: `Enable/disable features. + cmd.Use = "set-status" + cmd.Short = `Enable/disable features.` + cmd.Long = `Enable/disable features. Sets the configuration status for a workspace, including enabling or disabling - it.`, + it.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -103,10 +147,24 @@ var setStatusCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
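The registration flow repeats for every service in this change: each generated constructor (here newGetStatus and newSetStatus) is queued onto cmdOverrides from init(), and New() replays that queue so the group command is assembled on demand instead of through package-level command variables. A hypothetical, standalone usage sketch (the workspace_conf import alias is an assumption about the package name):

package main

import (
	"os"

	workspace_conf "github.com/databricks/cli/cmd/workspace/workspace-conf"
)

func main() {
	// New() builds the "workspace-conf" group; the package init() functions
	// have already queued AddCommand calls for get-status and set-status,
	// so both subcommands are attached before Execute runs.
	cmd := workspace_conf.New()
	if err := cmd.Execute(); err != nil {
		os.Exit(1)
	}
}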
+ for _, fn := range setStatusOverrides { + fn(cmd, &setStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetStatus()) + }) } // end service WorkspaceConf diff --git a/cmd/workspace/workspace/export_dir.go b/cmd/workspace/workspace/export_dir.go index 1c3fe968f..4f50a96e4 100644 --- a/cmd/workspace/workspace/export_dir.go +++ b/cmd/workspace/workspace/export_dir.go @@ -15,9 +15,19 @@ import ( "github.com/spf13/cobra" ) +type exportDirOptions struct { + sourceDir string + targetDir string + overwrite bool +} + // The callback function exports the file specified at relPath. This function is // meant to be used in conjunction with fs.WalkDir -func exportFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceDir, targetDir string) func(string, fs.DirEntry, error) error { +func (opts exportDirOptions) callback(ctx context.Context, workspaceFiler filer.Filer) func(string, fs.DirEntry, error) error { + sourceDir := opts.sourceDir + targetDir := opts.targetDir + overwrite := opts.overwrite + return func(relPath string, d fs.DirEntry, err error) error { if err != nil { return err @@ -55,7 +65,7 @@ func exportFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceD // Skip file if a file already exists in path. // os.Stat returns a fs.ErrNotExist if a file does not exist at path. // If a file exists, and overwrite is not set, we skip exporting the file - if _, err := os.Stat(targetPath); err == nil && !exportOverwrite { + if _, err := os.Stat(targetPath); err == nil && !overwrite { // Log event that this file/directory has been skipped return cmdio.RenderWithTemplate(ctx, newFileSkippedEvent(relPath, targetPath), "{{.SourcePath}} -> {{.TargetPath}} (skipped; already exists)\n") } @@ -80,46 +90,56 @@ func exportFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceD } } -var exportDirCommand = &cobra.Command{ - Use: "export-dir SOURCE_PATH TARGET_PATH", - Short: `Export a directory from a Databricks workspace to the local file system.`, - Long: ` -Export a directory recursively from a Databricks workspace to the local file system. -Notebooks will have one of the following extensions added .scala, .py, .sql, or .r -based on the language type. -`, - PreRunE: root.MustWorkspaceClient, - Args: cobra.ExactArgs(2), - RunE: func(cmd *cobra.Command, args []string) (err error) { +func newExportDir() *cobra.Command { + cmd := &cobra.Command{} + + var opts exportDirOptions + + cmd.Flags().BoolVar(&opts.overwrite, "overwrite", false, "overwrite existing local files") + + cmd.Use = "export-dir SOURCE_PATH TARGET_PATH" + cmd.Short = `Export a directory from a Databricks workspace to the local file system.` + cmd.Long = ` + Export a directory recursively from a Databricks workspace to the local file system. + Notebooks will have one of the following extensions added .scala, .py, .sql, or .r + based on the language type. 
+ ` + + cmd.Annotations = make(map[string]string) + cmd.Args = cobra.ExactArgs(2) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - sourceDir := args[0] - targetDir := args[1] + opts.sourceDir = args[0] + opts.targetDir = args[1] // Initialize a filer and a file system on the source directory - workspaceFiler, err := filer.NewWorkspaceFilesClient(w, sourceDir) + workspaceFiler, err := filer.NewWorkspaceFilesClient(w, opts.sourceDir) if err != nil { return err } workspaceFS := filer.NewFS(ctx, workspaceFiler) // TODO: print progress events on stderr instead: https://github.com/databricks/cli/issues/448 - err = cmdio.RenderJson(ctx, newExportStartedEvent(sourceDir)) + err = cmdio.RenderJson(ctx, newExportStartedEvent(opts.sourceDir)) if err != nil { return err } - err = fs.WalkDir(workspaceFS, ".", exportFileCallback(ctx, workspaceFiler, sourceDir, targetDir)) + err = fs.WalkDir(workspaceFS, ".", opts.callback(ctx, workspaceFiler)) if err != nil { return err } - return cmdio.RenderJson(ctx, newExportCompletedEvent(targetDir)) - }, + return cmdio.RenderJson(ctx, newExportCompletedEvent(opts.targetDir)) + } + + return cmd } -var exportOverwrite bool - func init() { - exportDirCommand.Flags().BoolVar(&exportOverwrite, "overwrite", false, "overwrite existing local files") - Cmd.AddCommand(exportDirCommand) + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newExportDir()) + }) } diff --git a/cmd/workspace/workspace/import_dir.go b/cmd/workspace/workspace/import_dir.go index af9c38ca3..bc0b80667 100644 --- a/cmd/workspace/workspace/import_dir.go +++ b/cmd/workspace/workspace/import_dir.go @@ -16,6 +16,12 @@ import ( "github.com/spf13/cobra" ) +type importDirOptions struct { + sourceDir string + targetDir string + overwrite bool +} + // The callback function imports the file specified at sourcePath. This function is // meant to be used in conjunction with fs.WalkDir // @@ -31,7 +37,11 @@ import ( // 1. Read the notebook, referring to it using it's local name "foo\\myNotebook.py" // 2. API call to import the notebook to the workspace, using it API payload name "foo/myNotebook.py" // 3. 
The notebook is materialized in the workspace using it's remote name "foo/myNotebook" -func importFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceDir, targetDir string) func(string, fs.DirEntry, error) error { +func (opts importDirOptions) callback(ctx context.Context, workspaceFiler filer.Filer) func(string, fs.DirEntry, error) error { + sourceDir := opts.sourceDir + targetDir := opts.targetDir + overwrite := opts.overwrite + return func(sourcePath string, d fs.DirEntry, err error) error { if err != nil { return err @@ -72,7 +82,7 @@ func importFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceD defer f.Close() // Create file in WSFS - if importOverwrite { + if overwrite { err = workspaceFiler.Write(ctx, nameForApiCall, f, filer.OverwriteIfExists) if err != nil { return err @@ -94,45 +104,55 @@ func importFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceD } } -var importDirCommand = &cobra.Command{ - Use: "import-dir SOURCE_PATH TARGET_PATH", - Short: `Import a directory from the local filesystem to a Databricks workspace.`, - Long: ` +func newImportDir() *cobra.Command { + cmd := &cobra.Command{} + + var opts importDirOptions + + cmd.Flags().BoolVar(&opts.overwrite, "overwrite", false, "overwrite existing workspace files") + + cmd.Use = "import-dir SOURCE_PATH TARGET_PATH" + cmd.Short = `Import a directory from the local filesystem to a Databricks workspace.` + cmd.Long = ` Import a directory recursively from the local file system to a Databricks workspace. Notebooks will have their extensions (one of .scala, .py, .sql, .ipynb, .r) stripped -`, - PreRunE: root.MustWorkspaceClient, - Args: cobra.ExactArgs(2), - RunE: func(cmd *cobra.Command, args []string) (err error) { +` + + cmd.Annotations = make(map[string]string) + cmd.Args = cobra.ExactArgs(2) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - sourceDir := args[0] - targetDir := args[1] + opts.sourceDir = args[0] + opts.targetDir = args[1] // Initialize a filer rooted at targetDir - workspaceFiler, err := filer.NewWorkspaceFilesClient(w, targetDir) + workspaceFiler, err := filer.NewWorkspaceFilesClient(w, opts.targetDir) if err != nil { return err } // TODO: print progress events on stderr instead: https://github.com/databricks/cli/issues/448 - err = cmdio.RenderJson(ctx, newImportStartedEvent(sourceDir)) + err = cmdio.RenderJson(ctx, newImportStartedEvent(opts.sourceDir)) if err != nil { return err } // Walk local directory tree and import files to the workspace - err = filepath.WalkDir(sourceDir, importFileCallback(ctx, workspaceFiler, sourceDir, targetDir)) + err = filepath.WalkDir(opts.sourceDir, opts.callback(ctx, workspaceFiler)) if err != nil { return err } - return cmdio.RenderJson(ctx, newImportCompletedEvent(targetDir)) - }, -} + return cmdio.RenderJson(ctx, newImportCompletedEvent(opts.targetDir)) + } -var importOverwrite bool + return cmd +} func init() { - importDirCommand.Flags().BoolVar(&importOverwrite, "overwrite", false, "overwrite existing workspace files") - Cmd.AddCommand(importDirCommand) + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newImportDir()) + }) } diff --git a/cmd/workspace/workspace/overrides.go b/cmd/workspace/workspace/overrides.go index e1b97c598..9cae5bef5 100644 --- a/cmd/workspace/workspace/overrides.go +++ b/cmd/workspace/workspace/overrides.go @@ -1,14 +1,25 @@ package workspace -import 
"github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *workspace.ListWorkspaceRequest) { listReq.Path = "/" listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Type"}} {{header "Language"}} {{header "Path"}} {{range .}}{{green "%d" .ObjectId}} {{blue "%s" .ObjectType}} {{cyan "%s" .Language}} {{.Path|cyan}} {{end}}`) +} +func exportOverride(exportCmd *cobra.Command, exportReq *workspace.ExportRequest) { // The export command prints the contents of the file to stdout by default. exportCmd.Annotations["template"] = `{{.Content | b64_decode}}` } + +func init() { + listOverrides = append(listOverrides, listOverride) + exportOverrides = append(exportOverrides, exportOverride) +} diff --git a/cmd/workspace/workspace/workspace.go b/cmd/workspace/workspace/workspace.go index ab9c6aec0..124680f0b 100755 --- a/cmd/workspace/workspace/workspace.go +++ b/cmd/workspace/workspace/workspace.go @@ -12,36 +12,56 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspace", - Short: `The Workspace API allows you to list, import, export, and delete notebooks and folders.`, - Long: `The Workspace API allows you to list, import, export, and delete notebooks and +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspace", + Short: `The Workspace API allows you to list, import, export, and delete notebooks and folders.`, + Long: `The Workspace API allows you to list, import, export, and delete notebooks and folders. A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text.`, - Annotations: map[string]string{ - "package": "workspace", - }, + GroupID: "workspace", + Annotations: map[string]string{ + "package": "workspace", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start delete command -var deleteReq workspace.Delete -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *workspace.Delete, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq workspace.Delete + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) - deleteCmd.Flags().BoolVar(&deleteReq.Recursive, "recursive", deleteReq.Recursive, `The flag that specifies whether to delete the object recursively.`) + cmd.Flags().BoolVar(&deleteReq.Recursive, "recursive", deleteReq.Recursive, `The flag that specifies whether to delete the object recursively.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete PATH", - Short: `Delete a workspace object.`, - Long: `Delete a workspace object. + cmd.Use = "delete PATH" + cmd.Short = `Delete a workspace object.` + cmd.Long = `Delete a workspace object. 
Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If path does not exist, this call returns an @@ -50,11 +70,12 @@ var deleteCmd = &cobra.Command{ DIRECTORY_NOT_EMPTY. Object deletion cannot be undone and deleting a directory recursively is not - atomic.`, + atomic.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -89,27 +110,47 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start export command -var exportReq workspace.ExportRequest -func init() { - Cmd.AddCommand(exportCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var exportOverrides []func( + *cobra.Command, + *workspace.ExportRequest, +) + +func newExport() *cobra.Command { + cmd := &cobra.Command{} + + var exportReq workspace.ExportRequest + // TODO: short flags - exportCmd.Flags().Var(&exportReq.Format, "format", `This specifies the format of the exported file.`) + cmd.Flags().Var(&exportReq.Format, "format", `This specifies the format of the exported file.`) -} - -var exportCmd = &cobra.Command{ - Use: "export PATH", - Short: `Export a workspace object.`, - Long: `Export a workspace object. + cmd.Use = "export PATH" + cmd.Short = `Export a workspace object.` + cmd.Long = `Export a workspace object. Exports an object or the contents of an entire directory. @@ -118,11 +159,12 @@ var exportCmd = &cobra.Command{ If the exported data would exceed size limit, this call returns MAX_NOTEBOOK_SIZE_EXCEEDED. Currently, this API does not support exporting a - library.`, + library.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -150,36 +192,183 @@ var exportCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range exportOverrides { + fn(cmd, &exportReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newExport()) + }) +} + +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionLevelsOverrides []func( + *cobra.Command, + *workspace.GetWorkspaceObjectPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq workspace.GetWorkspaceObjectPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels WORKSPACE_OBJECT_TYPE WORKSPACE_OBJECT_ID" + cmd.Short = `Get workspace object permission levels.` + cmd.Long = `Get workspace object permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionLevelsReq.WorkspaceObjectType = args[0] + getPermissionLevelsReq.WorkspaceObjectId = args[1] + + response, err := w.Workspace.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *workspace.GetWorkspaceObjectPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq workspace.GetWorkspaceObjectPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions WORKSPACE_OBJECT_TYPE WORKSPACE_OBJECT_ID" + cmd.Short = `Get workspace object permissions.` + cmd.Long = `Get workspace object permissions. + + Gets the permissions of a workspace object. Workspace objects can inherit + permissions from their parent objects or root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionsReq.WorkspaceObjectType = args[0] + getPermissionsReq.WorkspaceObjectId = args[1] + + response, err := w.Workspace.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
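The registration half of the pattern is visible in the `init()` functions interleaved above: instead of mutating a shared package-level `Cmd`, each generated command appends a closure to `cmdOverrides` that attaches a freshly constructed subcommand, so `workspace.New()` hands back a complete, self-contained command tree. A small sketch of what that buys a caller (a hypothetical standalone program; in the real CLI these groups are mounted under the root command):

    package main

    import (
    	"fmt"

    	"github.com/databricks/cli/cmd/workspace/workspace"
    )

    func main() {
    	// Each generated command registered itself on cmdOverrides in its init(),
    	// so New() returns a fully wired, independent command tree.
    	cmd := workspace.New()
    	for _, sub := range cmd.Commands() {
    		fmt.Println(sub.Name()) // e.g. delete, export, get-permissions, import, list, mkdirs, ...
    	}
    }
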
+ for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) } // start get-status command -var getStatusReq workspace.GetStatusRequest -func init() { - Cmd.AddCommand(getStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getStatusOverrides []func( + *cobra.Command, + *workspace.GetStatusRequest, +) + +func newGetStatus() *cobra.Command { + cmd := &cobra.Command{} + + var getStatusReq workspace.GetStatusRequest + // TODO: short flags -} - -var getStatusCmd = &cobra.Command{ - Use: "get-status PATH", - Short: `Get status.`, - Long: `Get status. + cmd.Use = "get-status PATH" + cmd.Short = `Get status.` + cmd.Long = `Get status. Gets the status of an object or a directory. If path does not exist, this - call returns an error RESOURCE_DOES_NOT_EXIST.`, + call returns an error RESOURCE_DOES_NOT_EXIST.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -190,48 +379,70 @@ var getStatusCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getStatusOverrides { + fn(cmd, &getStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetStatus()) + }) } // start import command -var importReq workspace.Import -var importJson flags.JsonFlag -func init() { - Cmd.AddCommand(importCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var importOverrides []func( + *cobra.Command, + *workspace.Import, +) + +func newImport() *cobra.Command { + cmd := &cobra.Command{} + + var importReq workspace.Import + var importJson flags.JsonFlag + // TODO: short flags - importCmd.Flags().Var(&importJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&importJson, "json", `either inline JSON string or @path/to/file.json with request body`) - importCmd.Flags().StringVar(&importReq.Content, "content", importReq.Content, `The base64-encoded content.`) - importCmd.Flags().Var(&importReq.Format, "format", `This specifies the format of the file to be imported.`) - importCmd.Flags().Var(&importReq.Language, "language", `The language of the object.`) - importCmd.Flags().BoolVar(&importReq.Overwrite, "overwrite", importReq.Overwrite, `The flag that specifies whether to overwrite existing object.`) + cmd.Flags().StringVar(&importReq.Content, "content", importReq.Content, `The base64-encoded content.`) + cmd.Flags().Var(&importReq.Format, "format", `This specifies the format of the file to be imported.`) + cmd.Flags().Var(&importReq.Language, "language", `The language of the object.`) + cmd.Flags().BoolVar(&importReq.Overwrite, "overwrite", importReq.Overwrite, `The flag that specifies whether to overwrite existing object.`) -} - -var importCmd = &cobra.Command{ - Use: "import PATH", - Short: `Import a workspace object.`, - Long: `Import a workspace object. + cmd.Use = "import PATH" + cmd.Short = `Import a workspace object.` + cmd.Long = `Import a workspace object. Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If path already exists and overwrite is set to false, this call returns an error RESOURCE_ALREADY_EXISTS. One can only - use DBC format to import a directory.`, + use DBC format to import a directory.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -249,39 +460,61 @@ var importCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range importOverrides { + fn(cmd, &importReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newImport()) + }) } // start list command -var listReq workspace.ListWorkspaceRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
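The Args function on the import command above shows the convention these generated commands use for their two input modes: positional arguments cover the required fields (here the workspace PATH), while --json accepts the whole request body inline or from @path/to/file.json, in which case the positional requirement drops to zero. Not every command relaxes the check; set-permissions and update-permissions below keep ExactArgs(2) and always take the object type and ID positionally. The generated code inlines this logic per command; expressed as a hypothetical standalone helper it would be roughly:

    package example

    import "github.com/spf13/cobra"

    // jsonAwareArgs mirrors the generated Args logic: expect n positional
    // arguments normally, but none once the request body arrives via --json.
    func jsonAwareArgs(n int) cobra.PositionalArgs {
    	return func(cmd *cobra.Command, args []string) error {
    		check := cobra.ExactArgs(n)
    		if cmd.Flags().Changed("json") {
    			check = cobra.ExactArgs(0)
    		}
    		return check(cmd, args)
    	}
    }
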
+var listOverrides []func( + *cobra.Command, + *workspace.ListWorkspaceRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq workspace.ListWorkspaceRequest + // TODO: short flags - listCmd.Flags().IntVar(&listReq.NotebooksModifiedAfter, "notebooks-modified-after", listReq.NotebooksModifiedAfter, `UTC timestamp in milliseconds.`) + cmd.Flags().IntVar(&listReq.NotebooksModifiedAfter, "notebooks-modified-after", listReq.NotebooksModifiedAfter, `UTC timestamp in milliseconds.`) -} - -var listCmd = &cobra.Command{ - Use: "list PATH", - Short: `List contents.`, - Long: `List contents. + cmd.Use = "list PATH" + cmd.Short = `List contents.` + cmd.Long = `List contents. Lists the contents of a directory, or the object if it is not a directory. If the input path does not exist, this call returns an error - RESOURCE_DOES_NOT_EXIST.`, + RESOURCE_DOES_NOT_EXIST.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -292,38 +525,59 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start mkdirs command -var mkdirsReq workspace.Mkdirs -var mkdirsJson flags.JsonFlag -func init() { - Cmd.AddCommand(mkdirsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var mkdirsOverrides []func( + *cobra.Command, + *workspace.Mkdirs, +) + +func newMkdirs() *cobra.Command { + cmd := &cobra.Command{} + + var mkdirsReq workspace.Mkdirs + var mkdirsJson flags.JsonFlag + // TODO: short flags - mkdirsCmd.Flags().Var(&mkdirsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&mkdirsJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var mkdirsCmd = &cobra.Command{ - Use: "mkdirs PATH", - Short: `Create a directory.`, - Long: `Create a directory. + cmd.Use = "mkdirs PATH" + cmd.Short = `Create a directory.` + cmd.Long = `Create a directory. Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error RESOURCE_ALREADY_EXISTS. 
Note that if this operation fails it may have succeeded in creating some of - the necessary parent directories.`, + the necessary parent directories.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -358,10 +612,170 @@ var mkdirsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range mkdirsOverrides { + fn(cmd, &mkdirsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newMkdirs()) + }) +} + +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *workspace.WorkspaceObjectPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq workspace.WorkspaceObjectPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions WORKSPACE_OBJECT_TYPE WORKSPACE_OBJECT_ID" + cmd.Short = `Set workspace object permissions.` + cmd.Long = `Set workspace object permissions. + + Sets permissions on a workspace object. Workspace objects can inherit + permissions from their parent objects or root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + setPermissionsReq.WorkspaceObjectType = args[0] + setPermissionsReq.WorkspaceObjectId = args[1] + + response, err := w.Workspace.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updatePermissionsOverrides []func( + *cobra.Command, + *workspace.WorkspaceObjectPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq workspace.WorkspaceObjectPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions WORKSPACE_OBJECT_TYPE WORKSPACE_OBJECT_ID" + cmd.Short = `Update workspace object permissions.` + cmd.Long = `Update workspace object permissions. + + Updates the permissions on a workspace object. Workspace objects can inherit + permissions from their parent objects or root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + updatePermissionsReq.WorkspaceObjectType = args[0] + updatePermissionsReq.WorkspaceObjectId = args[1] + + response, err := w.Workspace.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service Workspace diff --git a/go.mod b/go.mod index 610404bb4..14c85e675 100644 --- a/go.mod +++ b/go.mod @@ -1,10 +1,10 @@ module github.com/databricks/cli -go 1.18 +go 1.21 require ( github.com/briandowns/spinner v1.23.0 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.12.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.19.1 // Apache 2.0 github.com/fatih/color v1.15.0 // MIT github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/google/uuid v1.3.0 // BSD-3-Clause @@ -24,15 +24,15 @@ require ( github.com/whilp/git-urls v1.0.0 // MIT golang.org/x/exp v0.0.0-20230310171629-522b1b587ee0 golang.org/x/mod v0.12.0 - golang.org/x/oauth2 v0.10.0 + golang.org/x/oauth2 v0.11.0 golang.org/x/sync v0.3.0 - golang.org/x/term v0.10.0 - golang.org/x/text v0.11.0 + golang.org/x/term v0.11.0 + golang.org/x/text v0.12.0 gopkg.in/ini.v1 v1.67.0 // Apache 2.0 ) require ( - cloud.google.com/go/compute v1.20.1 // indirect + cloud.google.com/go/compute v1.23.0 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect @@ -42,7 +42,7 @@ require ( github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-querystring v1.1.0 // indirect - github.com/google/s2a-go v0.1.4 // indirect + github.com/google/s2a-go v0.1.5 // indirect github.com/googleapis/enterprise-certificate-proxy v0.2.5 // indirect 
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -50,14 +50,14 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/zclconf/go-cty v1.13.2 // indirect go.opencensus.io v0.24.0 // indirect - golang.org/x/crypto v0.11.0 // indirect - golang.org/x/net v0.12.0 // indirect - golang.org/x/sys v0.10.0 // indirect + golang.org/x/crypto v0.12.0 // indirect + golang.org/x/net v0.14.0 // indirect + golang.org/x/sys v0.11.0 // indirect golang.org/x/time v0.3.0 // indirect - google.golang.org/api v0.129.0 // indirect + google.golang.org/api v0.138.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc // indirect - google.golang.org/grpc v1.56.1 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 // indirect + google.golang.org/grpc v1.57.0 // indirect google.golang.org/protobuf v1.31.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index 176d39254..20c985b07 100644 --- a/go.sum +++ b/go.sum @@ -1,14 +1,16 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go/compute v1.20.1 h1:6aKEtlUiwEpJzM001l0yFkpXmUVXaN8W+fbkb2AZNbg= -cloud.google.com/go/compute v1.20.1/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= +cloud.google.com/go/compute v1.23.0 h1:tP41Zoavr8ptEqaW6j+LQOnyBBhO7OkOMAGrgLopTwY= +cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/Microsoft/go-winio v0.5.2 h1:a9IhgEQBCUEk6QCdml9CiJGhAws+YwffDHEMp1VMrpA= +github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8 h1:wPbRQzjjwFc0ih8puEVAOFGELsn1zoIIYdxvML7mDxA= github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8/go.mod h1:I0gYDMZ6Z5GRU7l58bNFSkPTFN6Yl12dsUlAZ8xy98g= github.com/acomagu/bufpipe v1.0.4 h1:e3H4WUzM3npvo5uv95QuJM3cQspFNtFBzvJ2oNjKIDQ= +github.com/acomagu/bufpipe v1.0.4/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= @@ -34,12 +36,13 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/databricks/databricks-sdk-go v0.12.0 h1:VgMJpvEiyRRrJ0mQx22Rkc73zjxUe125Ou9c5C99phM= -github.com/databricks/databricks-sdk-go v0.12.0/go.mod h1:h/oWnnfWcJQAotAhZS/GMnlcaE/8WhuZ5Vj7el/6Gn8= +github.com/databricks/databricks-sdk-go v0.19.1 
h1:hP7xZb+Hd8n0grnEcf2FOMn6lWox7vp5KAan3D2hnzM= +github.com/databricks/databricks-sdk-go v0.19.1/go.mod h1:Bt/3i3ry/rQdE6Y+psvkAENlp+LzJHaQK5PsLIstQb4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -52,8 +55,11 @@ github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBD github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= +github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= github.com/go-git/go-billy/v5 v5.4.1 h1:Uwp5tDRkPr+l/TnbHOQzp+tmJfLceOlbVucgpTz8ix4= +github.com/go-git/go-billy/v5 v5.4.1/go.mod h1:vjbugF6Fz7JIflbVpl1hJsGjSHNltrSw45YK/ukIvQg= github.com/go-git/go-git/v5 v5.6.1 h1:q4ZRqQl4pR/ZJHc1L5CFjGA1a10u76aV1iC+nh+bHsk= +github.com/go-git/go-git/v5 v5.6.1/go.mod h1:mvyoL6Unz0PiTQrGQfSfiLFhBH1c1e84ylC2MDs4ee8= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= @@ -84,16 +90,18 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= -github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= -github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= +github.com/google/s2a-go v0.1.5 h1:8IYp3w9nysqv3JH+NJgXJzGbDHzLOTj43BmSkp+O7qg= +github.com/google/s2a-go v0.1.5/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.2.5 h1:UR4rDjcgpgEnqpIEvkiqTYKBCKLNmlge2eVjoZfySzM= github.com/googleapis/enterprise-certificate-proxy v0.2.5/go.mod h1:RxW0N9901Cko1VOCW3SXCpWP+mlIEkk2tP7jnHy9a3w= -github.com/googleapis/gax-go/v2 v2.11.0 
h1:9V9PWXEsWnPpQhu/PeQIkS4eGzMlTLGgt80cUUI8Ki4= +github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= +github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= @@ -110,7 +118,9 @@ github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -124,6 +134,7 @@ github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D github.com/nwidger/jsoncolor v0.3.2 h1:rVJJlwAWDJShnbTYOQ5RM7yTA20INyKXlJ/fg4JMhHQ= github.com/nwidger/jsoncolor v0.3.2/go.mod h1:Cs34umxLbJvgBMnVNVqhji9BhoT/N/KinHqZptQ7cf4= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -134,7 +145,9 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI= github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/skeema/knownhosts v1.1.0 h1:Wvr9V0MxhjRbl3f9nMnKnFfiWTJmtECJ9Njkea3ysW0= +github.com/skeema/knownhosts v1.1.0/go.mod h1:sKFq3RD6/TKZkSWn8boUbDC7Qkgcv+8XXijpFO6roag= github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= @@ -153,6 +166,7 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/whilp/git-urls v1.0.0 h1:95f6UMWN5FKW71ECsXRUd3FVYiXdrE7aX4NZKcPmIjU= github.com/whilp/git-urls v1.0.0/go.mod h1:J16SAmobsqc3Qcy98brfl5f5+e0clUvg1krgwk/qCfE= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent 
v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zclconf/go-cty v1.13.2 h1:4GvrUxe/QUDYuJKAav4EYqdM47/kZa672LwmXFmEKT0= github.com/zclconf/go-cty v1.13.2/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= @@ -163,8 +177,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= -golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= +golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= +golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20230310171629-522b1b587ee0 h1:LGJsf5LRplCck6jUCH3dBL2dmycNruWNF5xugkSlfXw= golang.org/x/exp v0.0.0-20230310171629-522b1b587ee0/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= @@ -187,12 +201,12 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= -golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= +golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.10.0 h1:zHCpF2Khkwy4mMB4bv0U37YtJdTGW8jI0glAApi0Kh8= -golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI= +golang.org/x/oauth2 v0.11.0 h1:vPL4xzxBM4niKCW6g9whtaWVXTJf1U5e4aZxxFx/gbU= +golang.org/x/oauth2 v0.11.0/go.mod h1:LdF7O/8bLR/qWK9DrpXmbHLTouvRHK0SgJl0GmDBchk= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -217,20 +231,20 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c= -golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= +golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0= +golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= +golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -243,8 +257,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.129.0 h1:2XbdjjNfFPXQyufzQVwPf1RRnHH8Den2pfNE2jw7L8w= -google.golang.org/api v0.129.0/go.mod h1:dFjiXlanKwWE3612X97llhsoI36FAoIiRj3aTl5b/zE= +google.golang.org/api v0.138.0 h1:K/tVp05MxNVbHShRw9m7e9VJGdagNeTdMzqPH7AUqr0= +google.golang.org/api v0.138.0/go.mod h1:4xyob8CxC+0GChNBvEUAk8VBKNvYOTWM9T3v3UfRxuY= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= @@ -253,8 +267,8 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc h1:XSJ8Vk1SWuNr8S18z1NZSziL0CPIXLCCMDOEFtHBOFc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= +google.golang.org/genproto/googleapis/rpc 
v0.0.0-20230807174057-1744710a1577 h1:wukfNtZmZUurLN/atp2hiIeTKn7QJWIQdHzqmsOnAOk= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= @@ -263,8 +277,8 @@ google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTp google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= +google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw= +google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -283,6 +297,7 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= diff --git a/internal/acc/helpers.go b/internal/acc/helpers.go index aa9902745..f98001346 100644 --- a/internal/acc/helpers.go +++ b/internal/acc/helpers.go @@ -6,7 +6,6 @@ import ( "os" "strings" "testing" - "time" ) // GetEnvOrSkipTest proceeds with test only with that env variable. @@ -22,7 +21,6 @@ const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" // RandomName gives random name with optional prefix. e.g. 
qa.RandomName("tf-") func RandomName(prefix ...string) string { - rand.Seed(time.Now().UnixNano()) randLen := 12 b := make([]byte, randLen) for i := range b { diff --git a/internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json b/internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json new file mode 100644 index 000000000..cfed842cb --- /dev/null +++ b/internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json @@ -0,0 +1,8 @@ +{ + "properties": { + "unique_id": { + "type": "string", + "description": "Unique ID for job name" + } + } +} diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl b/internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl new file mode 100644 index 000000000..c0e840c85 --- /dev/null +++ b/internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl @@ -0,0 +1,8 @@ +bundle: + name: deploy-then-remove + +workspace: + root_path: "~/.bundle/{{.unique_id}}" + +include: + - "./*.yml" diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/foo.py b/internal/bundle/bundles/deploy_then_remove_resources/template/foo.py new file mode 100644 index 000000000..11b15b1a4 --- /dev/null +++ b/internal/bundle/bundles/deploy_then_remove_resources/template/foo.py @@ -0,0 +1 @@ +print("hello") diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl b/internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl new file mode 100644 index 000000000..b74344e4c --- /dev/null +++ b/internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl @@ -0,0 +1,7 @@ +resources: + pipelines: + bar: + name: test-bundle-pipeline-{{.unique_id}} + libraries: + - notebook: + path: "./foo.py" diff --git a/internal/bundle/bundles/empty_bundle/databricks.yml b/internal/bundle/bundles/empty_bundle/databricks.yml new file mode 100644 index 000000000..efc627820 --- /dev/null +++ b/internal/bundle/bundles/empty_bundle/databricks.yml @@ -0,0 +1,2 @@ +bundle: + name: abc diff --git a/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json b/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json new file mode 100644 index 000000000..f7f4b6342 --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json @@ -0,0 +1,21 @@ +{ + "properties": { + "project_name": { + "type": "string", + "default": "my_test_code", + "description": "Unique name for this project" + }, + "spark_version": { + "type": "string", + "description": "Spark version used for job cluster" + }, + "node_type_id": { + "type": "string", + "description": "Node type id for job cluster" + }, + "unique_id": { + "type": "string", + "description": "Unique ID for job name" + } + } +} diff --git a/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl b/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl new file mode 100644 index 000000000..e715cdf1e --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl @@ -0,0 +1,24 @@ +bundle: + name: wheel-task + +workspace: + root_path: "~/.bundle/{{.unique_id}}" + +resources: + jobs: + some_other_job: + name: "[${bundle.target}] Test Wheel Job {{.unique_id}}" + tasks: + - task_key: TestTask + new_cluster: + num_workers: 1 + spark_version: "{{.spark_version}}" + node_type_id: "{{.node_type_id}}" + python_wheel_task: + 
package_name: my_test_code + entry_point: run + parameters: + - "one" + - "two" + libraries: + - whl: ./dist/*.whl diff --git a/internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl b/internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl new file mode 100644 index 000000000..b528657b1 --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import {{.project_name}} + +setup( + name="{{.project_name}}", + version={{.project_name}}.__version__, + author={{.project_name}}.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my example wheel", + packages=find_packages(include=["{{.project_name}}"]), + entry_points={"group1": "run={{.project_name}}.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py b/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py new file mode 100644 index 000000000..909f1f322 --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py @@ -0,0 +1,2 @@ +__version__ = "0.0.1" +__author__ = "Databricks" diff --git a/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py b/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py new file mode 100644 index 000000000..ea918ce2d --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py @@ -0,0 +1,16 @@ +""" +The entry point of the Python Wheel +""" + +import sys + + +def main(): + # This method will print the provided arguments + print("Hello from my func") + print("Got arguments:") + print(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/internal/bundle/deploy_then_remove_resources_test.go b/internal/bundle/deploy_then_remove_resources_test.go new file mode 100644 index 000000000..73860593c --- /dev/null +++ b/internal/bundle/deploy_then_remove_resources_test.go @@ -0,0 +1,55 @@ +package bundle + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/internal" + "github.com/databricks/databricks-sdk-go" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAccBundleDeployThenRemoveResources(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + t.Log(env) + + uniqueId := uuid.New().String() + bundleRoot, err := initTestTemplate(t, "deploy_then_remove_resources", map[string]any{ + "unique_id": uniqueId, + }) + require.NoError(t, err) + + // deploy pipeline + err = deployBundle(t, bundleRoot) + require.NoError(t, err) + + w, err := databricks.NewWorkspaceClient() + require.NoError(t, err) + + // assert pipeline is created + pipelineName := "test-bundle-pipeline-" + uniqueId + pipeline, err := w.Pipelines.GetByName(context.Background(), pipelineName) + require.NoError(t, err) + assert.Equal(t, pipeline.Name, pipelineName) + + // delete resources.yml + err = os.Remove(filepath.Join(bundleRoot, "resources.yml")) + require.NoError(t, err) + + // deploy again + err = deployBundle(t, bundleRoot) + require.NoError(t, err) + + // assert pipeline is deleted + _, err = w.Pipelines.GetByName(context.Background(), pipelineName) + assert.ErrorContains(t, err, "does not exist") + + t.Cleanup(func() { + err = destroyBundle(t, bundleRoot) + require.NoError(t, err) + }) +} diff --git 
a/internal/bundle/empty_bundle_test.go b/internal/bundle/empty_bundle_test.go new file mode 100644 index 000000000..9b39368f4 --- /dev/null +++ b/internal/bundle/empty_bundle_test.go @@ -0,0 +1,37 @@ +package bundle + +import ( + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/internal" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestAccEmptyBundleDeploy(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + t.Log(env) + + // create empty bundle + tmpDir := t.TempDir() + f, err := os.Create(filepath.Join(tmpDir, "databricks.yml")) + require.NoError(t, err) + + bundleRoot := fmt.Sprintf(`bundle: + name: %s`, uuid.New().String()) + _, err = f.WriteString(bundleRoot) + require.NoError(t, err) + f.Close() + + // deploy empty bundle + err = deployBundle(t, tmpDir) + require.NoError(t, err) + + t.Cleanup(func() { + err = destroyBundle(t, tmpDir) + require.NoError(t, err) + }) +} diff --git a/internal/bundle/helpers.go b/internal/bundle/helpers.go new file mode 100644 index 000000000..3fd4eabc9 --- /dev/null +++ b/internal/bundle/helpers.go @@ -0,0 +1,70 @@ +package bundle + +import ( + "context" + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/internal" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/cli/libs/template" +) + +func initTestTemplate(t *testing.T, templateName string, config map[string]any) (string, error) { + templateRoot := filepath.Join("bundles", templateName) + + bundleRoot := t.TempDir() + configFilePath, err := writeConfigFile(t, config) + if err != nil { + return "", err + } + + ctx := root.SetWorkspaceClient(context.Background(), nil) + cmd := cmdio.NewIO(flags.OutputJSON, strings.NewReader(""), os.Stdout, os.Stderr, "bundles") + ctx = cmdio.InContext(ctx, cmd) + + err = template.Materialize(ctx, configFilePath, templateRoot, bundleRoot) + return bundleRoot, err +} + +func writeConfigFile(t *testing.T, config map[string]any) (string, error) { + bytes, err := json.Marshal(config) + if err != nil { + return "", err + } + + dir := t.TempDir() + filepath := filepath.Join(dir, "config.json") + t.Log("Configuration for template: ", string(bytes)) + + err = os.WriteFile(filepath, bytes, 0644) + return filepath, err +} + +func deployBundle(t *testing.T, path string) error { + t.Setenv("BUNDLE_ROOT", path) + c := internal.NewCobraTestRunner(t, "bundle", "deploy", "--force-lock") + _, _, err := c.Run() + return err +} + +func runResource(t *testing.T, path string, key string) (string, error) { + ctx := context.Background() + ctx = cmdio.NewContext(ctx, cmdio.Default()) + + c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "run", key) + stdout, _, err := c.Run() + return stdout.String(), err +} + +func destroyBundle(t *testing.T, path string) error { + t.Setenv("BUNDLE_ROOT", path) + c := internal.NewCobraTestRunner(t, "bundle", "destroy", "--auto-approve") + _, _, err := c.Run() + return err +} diff --git a/internal/bundle/python_wheel_test.go b/internal/bundle/python_wheel_test.go new file mode 100644 index 000000000..ee5d897d6 --- /dev/null +++ b/internal/bundle/python_wheel_test.go @@ -0,0 +1,43 @@ +package bundle + +import ( + "testing" + + "github.com/databricks/cli/internal" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestAccPythonWheelTaskDeployAndRun(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, 
"CLOUD_ENV") + t.Log(env) + + var nodeTypeId string + if env == "gcp" { + nodeTypeId = "n1-standard-4" + } else if env == "aws" { + nodeTypeId = "i3.xlarge" + } else { + nodeTypeId = "Standard_DS4_v2" + } + + bundleRoot, err := initTestTemplate(t, "python_wheel_task", map[string]any{ + "node_type_id": nodeTypeId, + "unique_id": uuid.New().String(), + "spark_version": "13.2.x-snapshot-scala2.12", + }) + require.NoError(t, err) + + err = deployBundle(t, bundleRoot) + require.NoError(t, err) + + t.Cleanup(func() { + destroyBundle(t, bundleRoot) + }) + + out, err := runResource(t, bundleRoot, "some_other_job") + require.NoError(t, err) + require.Contains(t, out, "Hello from my func") + require.Contains(t, out, "Got arguments:") + require.Contains(t, out, "['python', 'one', 'two']") +} diff --git a/internal/fs_cat_test.go b/internal/fs_cat_test.go index 5d6952f4f..f3c8e59cd 100644 --- a/internal/fs_cat_test.go +++ b/internal/fs_cat_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestFsCatForDbfs(t *testing.T) { +func TestAccFsCatForDbfs(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -33,21 +33,21 @@ func TestFsCatForDbfs(t *testing.T) { assert.Equal(t, "abc", stdout.String()) } -func TestFsCatForDbfsOnNonExistentFile(t *testing.T) { +func TestAccFsCatForDbfsOnNonExistentFile(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, _, err := RequireErrorRun(t, "fs", "cat", "dbfs:/non-existent-file") assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsCatForDbfsInvalidScheme(t *testing.T) { +func TestAccFsCatForDbfsInvalidScheme(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file") - assert.ErrorContains(t, err, "expected dbfs path (with the dbfs:/ prefix): dab:/non-existent-file") + assert.ErrorContains(t, err, "invalid scheme: dab") } -func TestFsCatDoesNotSupportOutputModeJson(t *testing.T) { +func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() diff --git a/internal/fs_ls_test.go b/internal/fs_ls_test.go index 885fc31f9..d21817284 100644 --- a/internal/fs_ls_test.go +++ b/internal/fs_ls_test.go @@ -16,7 +16,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestFsLsForDbfs(t *testing.T) { +func TestAccFsLsForDbfs(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -51,7 +51,7 @@ func TestFsLsForDbfs(t *testing.T) { assert.Equal(t, float64(3), parsedStdout[1]["size"]) } -func TestFsLsForDbfsWithAbsolutePaths(t *testing.T) { +func TestAccFsLsForDbfsWithAbsolutePaths(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -87,7 +87,7 @@ func TestFsLsForDbfsWithAbsolutePaths(t *testing.T) { assert.Equal(t, float64(3), parsedStdout[1]["size"]) } -func TestFsLsForDbfsOnFile(t *testing.T) { +func TestAccFsLsForDbfsOnFile(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -108,7 +108,7 @@ func TestFsLsForDbfsOnFile(t *testing.T) { assert.Regexp(t, regexp.MustCompile("not a directory: .*/a/hello.txt"), err.Error()) } -func TestFsLsForDbfsOnEmptyDir(t *testing.T) { +func TestAccFsLsForDbfsOnEmptyDir(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) w, err := databricks.NewWorkspaceClient() @@ -126,16 +126,16 @@ func TestFsLsForDbfsOnEmptyDir(t *testing.T) { assert.Equal(t, 0, len(parsedStdout)) } -func TestFsLsForDbfsForNonexistingDir(t *testing.T) { +func 
TestAccFsLsForDbfsForNonexistingDir(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, _, err := RequireErrorRun(t, "fs", "ls", "dbfs:/john-cena", "--output=json") assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsLsWithoutScheme(t *testing.T) { +func TestAccFsLsWithoutScheme(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, _, err := RequireErrorRun(t, "fs", "ls", "/ray-mysterio", "--output=json") - assert.ErrorContains(t, err, "expected dbfs path (with the dbfs:/ prefix): /ray-mysterio") + assert.ErrorIs(t, err, fs.ErrNotExist) } diff --git a/internal/mkdir_test.go b/internal/fs_mkdir_test.go similarity index 84% rename from internal/mkdir_test.go rename to internal/fs_mkdir_test.go index 7c96e63b1..25117d532 100644 --- a/internal/mkdir_test.go +++ b/internal/fs_mkdir_test.go @@ -3,6 +3,7 @@ package internal import ( "context" "path" + "regexp" "strings" "testing" @@ -12,7 +13,7 @@ import ( "github.com/stretchr/testify/require" ) -func TesFsMkdirCreatesDirectory(t *testing.T) { +func TestAccFsMkdirCreatesDirectory(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -36,7 +37,7 @@ func TesFsMkdirCreatesDirectory(t *testing.T) { assert.Equal(t, true, info.IsDir()) } -func TestFsMkdirCreatesMultipleDirectories(t *testing.T) { +func TestAccFsMkdirCreatesMultipleDirectories(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -72,7 +73,7 @@ func TestFsMkdirCreatesMultipleDirectories(t *testing.T) { assert.Equal(t, true, infoC.IsDir()) } -func TestFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { +func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -93,7 +94,7 @@ func TestFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { assert.Equal(t, "", stdout.String()) } -func TestFsMkdirWhenFileExistsAtPath(t *testing.T) { +func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -110,5 +111,7 @@ func TestFsMkdirWhenFileExistsAtPath(t *testing.T) { // assert run fails _, _, err = RequireErrorRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "hello")) - assert.ErrorContains(t, err, "Cannot create directory") + // Different cloud providers return different errors. 
+ regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`) + assert.Regexp(t, regex, err.Error()) } diff --git a/internal/rm_test.go b/internal/fs_rm_test.go similarity index 86% rename from internal/rm_test.go rename to internal/fs_rm_test.go index dd6a28593..1bee06c74 100644 --- a/internal/rm_test.go +++ b/internal/fs_rm_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestFsRmForFile(t *testing.T) { +func TestAccFsRmForFile(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -45,7 +45,7 @@ func TestFsRmForFile(t *testing.T) { assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsRmForEmptyDirectory(t *testing.T) { +func TestAccFsRmForEmptyDirectory(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -77,7 +77,7 @@ func TestFsRmForEmptyDirectory(t *testing.T) { assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsRmForNonEmptyDirectory(t *testing.T) { +func TestAccFsRmForNonEmptyDirectory(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -101,19 +101,19 @@ func TestFsRmForNonEmptyDirectory(t *testing.T) { // Run rm command _, _, err = RequireErrorRun(t, "fs", "rm", "dbfs:"+path.Join(tmpDir, "avacado")) - assert.ErrorContains(t, err, "Non-recursive delete of non-empty directory") + assert.ErrorIs(t, err, fs.ErrInvalid) + assert.ErrorContains(t, err, "directory not empty") } -func TestFsRmForNonExistentFile(t *testing.T) { +func TestAccFsRmForNonExistentFile(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - // No error is returned on command run - stdout, stderr := RequireSuccessfulRun(t, "fs", "rm", "dbfs:/does-not-exist") - assert.Equal(t, "", stderr.String()) - assert.Equal(t, "", stdout.String()) + // Expect error if file does not exist + _, _, err := RequireErrorRun(t, "fs", "rm", "dbfs:/does-not-exist") + assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsRmForNonEmptyDirectoryWithRecursiveFlag(t *testing.T) { +func TestAccFsRmForNonEmptyDirectoryWithRecursiveFlag(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() diff --git a/internal/git_clone_test.go b/internal/git_clone_test.go new file mode 100644 index 000000000..3fb69b924 --- /dev/null +++ b/internal/git_clone_test.go @@ -0,0 +1,64 @@ +package internal + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/libs/git" + "github.com/stretchr/testify/assert" +) + +func TestAccGitClone(t *testing.T) { + t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + + tmpDir := t.TempDir() + ctx := context.Background() + var err error + + err = git.Clone(ctx, "https://github.com/databricks/databricks-empty-ide-project.git", "", tmpDir) + assert.NoError(t, err) + + // assert repo content + assert.NoError(t, err) + b, err := os.ReadFile(filepath.Join(tmpDir, "README-IDE.md")) + assert.NoError(t, err) + assert.Contains(t, string(b), "This folder contains a project that was synchronized from an IDE.") + + // assert current branch is ide, ie default for the repo + b, err = os.ReadFile(filepath.Join(tmpDir, ".git/HEAD")) + assert.NoError(t, err) + assert.Contains(t, string(b), "ide") +} + +func TestAccGitCloneWithOnlyRepoNameOnAlternateBranch(t *testing.T) { + t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + + tmpDir := t.TempDir() + ctx := context.Background() + var err error + + err = git.Clone(ctx, 
"notebook-best-practices", "dais-2022", tmpDir) + + // assert on repo content + assert.NoError(t, err) + b, err := os.ReadFile(filepath.Join(tmpDir, "README.md")) + assert.NoError(t, err) + assert.Contains(t, string(b), "Software engineering best practices for Databricks notebooks") + + // assert current branch is main, ie default for the repo + b, err = os.ReadFile(filepath.Join(tmpDir, ".git/HEAD")) + assert.NoError(t, err) + assert.Contains(t, string(b), "dais-2022") +} + +func TestAccGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) { + t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + + tmpDir := t.TempDir() + + err := git.Clone(context.Background(), "https://github.com/monalisa/doesnot-exist.git", "", tmpDir) + // Expect the error to originate from shelling out to `git clone` + assert.ErrorContains(t, err, "git clone failed:") +} diff --git a/internal/helpers.go b/internal/helpers.go index 449b6d9ab..68c00019a 100644 --- a/internal/helpers.go +++ b/internal/helpers.go @@ -15,7 +15,7 @@ import ( "testing" "time" - "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd" _ "github.com/databricks/cli/cmd/version" "github.com/spf13/cobra" "github.com/spf13/pflag" @@ -37,7 +37,6 @@ func GetEnvOrSkipTest(t *testing.T, name string) string { // RandomName gives random name with optional prefix. e.g. qa.RandomName("tf-") func RandomName(prefix ...string) string { - rand.Seed(time.Now().UnixNano()) randLen := 12 b := make([]byte, randLen) for i := range b { @@ -59,6 +58,8 @@ type cobraTestRunner struct { stdout bytes.Buffer stderr bytes.Buffer + ctx context.Context + // Line-by-line output. // Background goroutines populate these channels by reading from stdout/stderr pipes. stdoutLines <-chan string @@ -117,7 +118,7 @@ func (t *cobraTestRunner) RunBackground() { var stdoutW, stderrW io.WriteCloser stdoutR, stdoutW = io.Pipe() stderrR, stderrW = io.Pipe() - root := root.RootCmd + root := cmd.New(context.Background()) root.SetOut(stdoutW) root.SetErr(stderrW) root.SetArgs(t.args) @@ -129,7 +130,7 @@ func (t *cobraTestRunner) RunBackground() { t.registerFlagCleanup(root) errch := make(chan error) - ctx, cancel := context.WithCancel(context.Background()) + ctx, cancel := context.WithCancel(t.ctx) // Tee stdout/stderr to buffers. 
stdoutR = io.TeeReader(stdoutR, &t.stdout) @@ -235,6 +236,15 @@ func (c *cobraTestRunner) Eventually(condition func() bool, waitFor time.Duratio func NewCobraTestRunner(t *testing.T, args ...string) *cobraTestRunner { return &cobraTestRunner{ T: t, + ctx: context.Background(), + args: args, + } +} + +func NewCobraTestRunnerWithContext(t *testing.T, ctx context.Context, args ...string) *cobraTestRunner { + return &cobraTestRunner{ + T: t, + ctx: ctx, args: args, } } diff --git a/internal/locker_test.go b/internal/locker_test.go index 2c7e7aa8e..661838ecc 100644 --- a/internal/locker_test.go +++ b/internal/locker_test.go @@ -90,7 +90,7 @@ func TestAccLock(t *testing.T) { indexOfAnInactiveLocker = i } assert.ErrorContains(t, lockerErrs[i], "lock acquired by") - assert.ErrorContains(t, lockerErrs[i], "Use --force to override") + assert.ErrorContains(t, lockerErrs[i], "Use --force-lock to override") } } assert.Equal(t, 1, countActive, "Exactly one locker should successfull acquire the lock") diff --git a/internal/secrets_test.go b/internal/secrets_test.go index 1e9c86abf..b030071bb 100644 --- a/internal/secrets_test.go +++ b/internal/secrets_test.go @@ -77,13 +77,6 @@ func TestSecretsPutSecretStringValue(tt *testing.T) { func TestSecretsPutSecretBytesValue(tt *testing.T) { ctx, t := acc.WorkspaceTest(tt) - - if true { - // Uncomment below to run this test in isolation. - // To be addressed once none of the commands taint global state. - t.Skip("skipping because the test above clobbers global state") - } - scope := temporarySecretScope(ctx, t) key := "test-key" value := []byte{0x00, 0x01, 0x02, 0x03} diff --git a/internal/sync_test.go b/internal/sync_test.go index 09418a855..bc1cbd914 100644 --- a/internal/sync_test.go +++ b/internal/sync_test.go @@ -159,7 +159,7 @@ func (a *syncTest) remoteFileContent(ctx context.Context, relativePath string, e var res []byte a.c.Eventually(func() bool { - err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, &res) + err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &res) require.NoError(a.t, err) actualContent := string(res) return actualContent == expectedContent @@ -509,12 +509,12 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) { // Hypothetical repo path doesn't exist. nonExistingRepoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("doesnt-exist-")) - err = sync.EnsureRemotePathIsUsable(ctx, wsc, nonExistingRepoPath) + err = sync.EnsureRemotePathIsUsable(ctx, wsc, nonExistingRepoPath, nil) assert.ErrorContains(t, err, " does not exist; please create it first") // Paths nested under a hypothetical repo path should yield the same error. nestedPath := path.Join(nonExistingRepoPath, "nested/directory") - err = sync.EnsureRemotePathIsUsable(ctx, wsc, nestedPath) + err = sync.EnsureRemotePathIsUsable(ctx, wsc, nestedPath, nil) assert.ErrorContains(t, err, " does not exist; please create it first") } @@ -526,12 +526,12 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) { _, remoteRepoPath := setupRepo(t, wsc, ctx) // Repo itself is usable. - err := sync.EnsureRemotePathIsUsable(ctx, wsc, remoteRepoPath) + err := sync.EnsureRemotePathIsUsable(ctx, wsc, remoteRepoPath, nil) assert.NoError(t, err) // Path nested under repo path is usable. nestedPath := path.Join(remoteRepoPath, "nested/directory") - err = sync.EnsureRemotePathIsUsable(ctx, wsc, nestedPath) + err = sync.EnsureRemotePathIsUsable(ctx, wsc, nestedPath, nil) assert.NoError(t, err) // Verify that the directory has been created. 
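Aside: the NewCobraTestRunnerWithContext constructor added to internal/helpers.go above lets a test supply its own context (for example one carrying a cmdio logger) instead of relying on process-global state. A minimal sketch of how a test might use it, assuming the runner's Run accessors behave as in the bundle helpers earlier in this diff; the test itself is hypothetical and not part of this change:

```go
package internal

import (
	"context"
	"testing"

	"github.com/databricks/cli/libs/cmdio"
)

// Hypothetical example test; mirrors how runResource in internal/bundle/helpers.go
// threads a cmdio logger through the runner via the new context parameter.
func TestExampleVersionWithContext(t *testing.T) {
	ctx := cmdio.NewContext(context.Background(), cmdio.Default())

	c := NewCobraTestRunnerWithContext(t, ctx, "version")
	stdout, _, err := c.Run()
	if err != nil {
		t.Fatal(err)
	}
	t.Log(stdout.String())
}
```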
@@ -549,7 +549,7 @@ func TestAccSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) { require.NoError(t, err) remotePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName("ensure-path-exists-test-")) - err = sync.EnsureRemotePathIsUsable(ctx, wsc, remotePath) + err = sync.EnsureRemotePathIsUsable(ctx, wsc, remotePath, me) assert.NoError(t, err) // Clean up directory after test. diff --git a/internal/testutil/env.go b/internal/testutil/env.go new file mode 100644 index 000000000..11a610189 --- /dev/null +++ b/internal/testutil/env.go @@ -0,0 +1,37 @@ +package testutil + +import ( + "os" + "runtime" + "strings" + "testing" +) + +// CleanupEnvironment sets up a pristine environment containing only $PATH and $HOME. +// The original environment is restored upon test completion. +// Note: use of this function is incompatible with parallel execution. +func CleanupEnvironment(t *testing.T) { + // Restore environment when test finishes. + environ := os.Environ() + t.Cleanup(func() { + // Restore original environment. + for _, kv := range environ { + kvs := strings.SplitN(kv, "=", 2) + os.Setenv(kvs[0], kvs[1]) + } + }) + + path := os.Getenv("PATH") + pwd := os.Getenv("PWD") + os.Clearenv() + + // We use t.Setenv instead of os.Setenv because the former actively + // prevents a test being run with t.Parallel. Modifying the environment + // within a test is not compatible with running tests in parallel + // because of isolation; the environment is scoped to the process. + t.Setenv("PATH", path) + t.Setenv("HOME", pwd) + if runtime.GOOS == "windows" { + t.Setenv("USERPROFILE", pwd) + } +} diff --git a/libs/auth/oauth.go b/libs/auth/oauth.go index b7e0ce2f0..dd27d04b2 100644 --- a/libs/auth/oauth.go +++ b/libs/auth/oauth.go @@ -2,6 +2,7 @@ package auth import ( "context" + "crypto/rand" "crypto/sha256" _ "embed" "encoding/base64" @@ -9,7 +10,6 @@ import ( "errors" "fmt" "io" - "math/rand" "net" "net/http" "strings" @@ -255,7 +255,6 @@ func (a *PersistentAuth) stateAndPKCE() (string, *authhandler.PKCEParams) { } func (a *PersistentAuth) randomString(size int) string { - rand.Seed(time.Now().UnixNano()) raw := make([]byte, size) _, _ = rand.Read(raw) return base64.RawURLEncoding.EncodeToString(raw) diff --git a/libs/auth/service_principal.go b/libs/auth/service_principal.go new file mode 100644 index 000000000..cb488d16e --- /dev/null +++ b/libs/auth/service_principal.go @@ -0,0 +1,15 @@ +package auth + +import ( + "github.com/google/uuid" +) + +// Determines whether a given user id is a service principal. +// This function uses a heuristic: if the user id is a UUID, then we assume +// it's a service principal. Unfortunately, the service principal listing API is too +// slow for our purposes. And the "users" and "service principals get" APIs +// only allow access by workspace admins. 
+func IsServicePrincipal(userId string) bool { + _, err := uuid.Parse(userId) + return err == nil +} diff --git a/libs/auth/service_principal_test.go b/libs/auth/service_principal_test.go new file mode 100644 index 000000000..95e8ab5cb --- /dev/null +++ b/libs/auth/service_principal_test.go @@ -0,0 +1,19 @@ +package auth + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIsServicePrincipal_ValidUUID(t *testing.T) { + userId := "8b948b2e-d2b5-4b9e-8274-11b596f3b652" + isSP := IsServicePrincipal(userId) + assert.True(t, isSP, "Expected user ID to be recognized as a service principal") +} + +func TestIsServicePrincipal_InvalidUUID(t *testing.T) { + userId := "invalid" + isSP := IsServicePrincipal(userId) + assert.False(t, isSP, "Expected user ID to not be recognized as a service principal") +} diff --git a/libs/cmdio/io.go b/libs/cmdio/io.go index a60231c09..cf405a7a4 100644 --- a/libs/cmdio/io.go +++ b/libs/cmdio/io.go @@ -5,6 +5,7 @@ import ( "fmt" "io" "os" + "slices" "strings" "time" @@ -12,7 +13,6 @@ import ( "github.com/databricks/cli/libs/flags" "github.com/manifoldco/promptui" "github.com/mattn/go-isatty" - "golang.org/x/exp/slices" ) // cmdIO is the private instance, that is not supposed to be accessed @@ -140,8 +140,8 @@ func (c *cmdIO) Select(names map[string]string, label string) (id string, err er for k, v := range names { items = append(items, tuple{k, v}) } - slices.SortFunc(items, func(a, b tuple) bool { - return a.Name < b.Name + slices.SortFunc(items, func(a, b tuple) int { + return strings.Compare(a.Name, b.Name) }) idx, _, err := (&promptui.Select{ Label: label, @@ -205,6 +205,42 @@ func Prompt(ctx context.Context) *promptui.Prompt { } } +func RunSelect(ctx context.Context, prompt *promptui.Select) (int, string, error) { + c := fromContext(ctx) + prompt.Stdin = io.NopCloser(c.in) + prompt.Stdout = nopWriteCloser{c.err} + return prompt.Run() +} + +func (c *cmdIO) simplePrompt(label string) *promptui.Prompt { + return &promptui.Prompt{ + Label: label, + Stdin: io.NopCloser(c.in), + Stdout: nopWriteCloser{c.out}, + } +} + +func (c *cmdIO) SimplePrompt(label string) (value string, err error) { + return c.simplePrompt(label).Run() +} + +func SimplePrompt(ctx context.Context, label string) (value string, err error) { + c := fromContext(ctx) + return c.SimplePrompt(label) +} + +func (c *cmdIO) DefaultPrompt(label, defaultValue string) (value string, err error) { + prompt := c.simplePrompt(label) + prompt.Default = defaultValue + prompt.AllowEdit = true + return prompt.Run() +} + +func DefaultPrompt(ctx context.Context, label, defaultValue string) (value string, err error) { + c := fromContext(ctx) + return c.DefaultPrompt(label, defaultValue) +} + func (c *cmdIO) Spinner(ctx context.Context) chan string { var sp *spinner.Spinner if c.interactive { diff --git a/libs/cmdio/logger.go b/libs/cmdio/logger.go index a507c5cce..7d760b998 100644 --- a/libs/cmdio/logger.go +++ b/libs/cmdio/logger.go @@ -7,8 +7,10 @@ import ( "fmt" "io" "os" + "strings" "github.com/databricks/cli/libs/flags" + "github.com/manifoldco/promptui" ) // This is the interface for all io interactions with a user @@ -74,31 +76,94 @@ func LogError(ctx context.Context, err error) { }) } -func Ask(ctx context.Context, question string) (bool, error) { +func Ask(ctx context.Context, question, defaultVal string) (string, error) { logger, ok := FromContext(ctx) if !ok { logger = Default() } - return logger.Ask(question) + return logger.Ask(question, defaultVal) } -func (l *Logger) Ask(question 
string) (bool, error) { - if l.Mode == flags.ModeJson { - return false, fmt.Errorf("question prompts are not supported in json mode") +func AskYesOrNo(ctx context.Context, question string) (bool, error) { + logger, ok := FromContext(ctx) + if !ok { + logger = Default() } - l.Writer.Write([]byte(question)) - ans, err := l.Reader.ReadString('\n') + // Add acceptable answers to the question prompt. + question += ` [y/n]` + // Ask the question + ans, err := logger.Ask(question, "") if err != nil { return false, err } - if ans == "y\n" { + if ans == "y" { return true, nil - } else { - return false, nil } + return false, nil +} + +func AskSelect(ctx context.Context, question string, choices []string) (string, error) { + logger, ok := FromContext(ctx) + if !ok { + logger = Default() + } + return logger.AskSelect(question, choices) +} + +func (l *Logger) AskSelect(question string, choices []string) (string, error) { + if l.Mode == flags.ModeJson { + return "", fmt.Errorf("question prompts are not supported in json mode") + } + + prompt := promptui.Select{ + Label: question, + Items: choices, + HideHelp: true, + Templates: &promptui.SelectTemplates{ + Label: "{{.}}: ", + Selected: fmt.Sprintf("%s: {{.}}", question), + }, + } + + _, ans, err := prompt.Run() + if err != nil { + return "", err + } + return ans, nil +} + +func (l *Logger) Ask(question string, defaultVal string) (string, error) { + if l.Mode == flags.ModeJson { + return "", fmt.Errorf("question prompts are not supported in json mode") + } + + // Add default value to question prompt. + if defaultVal != "" { + question += fmt.Sprintf(` [%s]`, defaultVal) + } + question += `: ` + + // print prompt + _, err := l.Writer.Write([]byte(question)) + if err != nil { + return "", err + } + + // read user input. 
Trim new line characters + ans, err := l.Reader.ReadString('\n') + if err != nil { + return "", err + } + ans = strings.Trim(ans, "\n\r") + + // Return default value if user just presses enter + if ans == "" { + return defaultVal, nil + } + return ans, nil } func (l *Logger) writeJson(event Event) { diff --git a/libs/cmdio/logger_test.go b/libs/cmdio/logger_test.go index ff715b11e..c5c00d022 100644 --- a/libs/cmdio/logger_test.go +++ b/libs/cmdio/logger_test.go @@ -1,6 +1,7 @@ package cmdio import ( + "context" "testing" "github.com/databricks/cli/libs/flags" @@ -9,6 +10,14 @@ import ( func TestAskFailedInJsonMode(t *testing.T) { l := NewLogger(flags.ModeJson) - _, err := l.Ask("What is your spirit animal?") + _, err := l.Ask("What is your spirit animal?", "") assert.ErrorContains(t, err, "question prompts are not supported in json mode") } + +func TestAskChoiceFailsInJsonMode(t *testing.T) { + l := NewLogger(flags.ModeJson) + ctx := NewContext(context.Background(), l) + + _, err := AskSelect(ctx, "what is a question?", []string{"b", "c", "a"}) + assert.EqualError(t, err, "question prompts are not supported in json mode") +} diff --git a/libs/cmdio/testing.go b/libs/cmdio/testing.go new file mode 100644 index 000000000..43592489e --- /dev/null +++ b/libs/cmdio/testing.go @@ -0,0 +1,46 @@ +package cmdio + +import ( + "bufio" + "context" + "io" +) + +type Test struct { + Done context.CancelFunc + + Stdin *bufio.Writer + Stdout *bufio.Reader + Stderr *bufio.Reader +} + +func SetupTest(ctx context.Context) (context.Context, *Test) { + rin, win := io.Pipe() + rout, wout := io.Pipe() + rerr, werr := io.Pipe() + + cmdio := &cmdIO{ + interactive: true, + in: rin, + out: wout, + err: werr, + } + + ctx, cancel := context.WithCancel(ctx) + ctx = InContext(ctx, cmdio) + + // Wait for context to be done, so we can drain stdin and close the pipes. + go func() { + <-ctx.Done() + rin.Close() + wout.Close() + werr.Close() + }() + + return ctx, &Test{ + Done: cancel, + Stdin: bufio.NewWriter(win), + Stdout: bufio.NewReader(rout), + Stderr: bufio.NewReader(rerr), + } +} diff --git a/libs/databrickscfg/profiles.go b/libs/databrickscfg/profiles.go index 7892bddd1..864000d03 100644 --- a/libs/databrickscfg/profiles.go +++ b/libs/databrickscfg/profiles.go @@ -1,7 +1,9 @@ package databrickscfg import ( + "fmt" "os" + "path/filepath" "strings" "github.com/databricks/databricks-sdk-go/config" @@ -64,12 +66,34 @@ func MatchAllProfiles(p Profile) bool { return true } -const DefaultPath = "~/.databrickscfg" +// Get the path to the .databrickscfg file, falling back to the default in the current user's home directory. 
+func GetPath() (string, error) { + configFile := os.Getenv("DATABRICKS_CONFIG_FILE") + if configFile == "" { + configFile = "~/.databrickscfg" + } + if strings.HasPrefix(configFile, "~") { + homedir, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("cannot find homedir: %w", err) + } + configFile = filepath.Join(homedir, configFile[1:]) + } + return configFile, nil +} -func LoadProfiles(path string, fn ProfileMatchFunction) (file string, profiles Profiles, err error) { - f, err := config.LoadFile(path) +func Get() (*config.File, error) { + configFile, err := GetPath() if err != nil { - return + return nil, fmt.Errorf("cannot determine Databricks config file path: %w", err) + } + return config.LoadFile(configFile) +} + +func LoadProfiles(fn ProfileMatchFunction) (file string, profiles Profiles, err error) { + f, err := Get() + if err != nil { + return "", nil, fmt.Errorf("cannot load Databricks config file: %w", err) } homedir, err := os.UserHomeDir() @@ -106,7 +130,7 @@ func LoadProfiles(path string, fn ProfileMatchFunction) (file string, profiles P } func ProfileCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { - _, profiles, err := LoadProfiles(DefaultPath, MatchAllProfiles) + _, profiles, err := LoadProfiles(MatchAllProfiles) if err != nil { return nil, cobra.ShellCompDirectiveError } diff --git a/libs/databrickscfg/profiles_test.go b/libs/databrickscfg/profiles_test.go index 582c6658e..b1acdce92 100644 --- a/libs/databrickscfg/profiles_test.go +++ b/libs/databrickscfg/profiles_test.go @@ -32,19 +32,22 @@ func TestLoadProfilesReturnsHomedirAsTilde(t *testing.T) { } else { t.Setenv("HOME", "./testdata") } - file, _, err := LoadProfiles("./testdata/databrickscfg", func(p Profile) bool { return true }) + t.Setenv("DATABRICKS_CONFIG_FILE", "./testdata/databrickscfg") + file, _, err := LoadProfiles(func(p Profile) bool { return true }) require.NoError(t, err) assert.Equal(t, "~/databrickscfg", file) } func TestLoadProfilesMatchWorkspace(t *testing.T) { - _, profiles, err := LoadProfiles("./testdata/databrickscfg", MatchWorkspaceProfiles) + t.Setenv("DATABRICKS_CONFIG_FILE", "./testdata/databrickscfg") + _, profiles, err := LoadProfiles(MatchWorkspaceProfiles) require.NoError(t, err) assert.Equal(t, []string{"DEFAULT", "query", "foo1", "foo2"}, profiles.Names()) } func TestLoadProfilesMatchAccount(t *testing.T) { - _, profiles, err := LoadProfiles("./testdata/databrickscfg", MatchAccountProfiles) + t.Setenv("DATABRICKS_CONFIG_FILE", "./testdata/databrickscfg") + _, profiles, err := LoadProfiles(MatchAccountProfiles) require.NoError(t, err) assert.Equal(t, []string{"acc"}, profiles.Names()) } diff --git a/libs/env/context.go b/libs/env/context.go new file mode 100644 index 000000000..cf04c1ece --- /dev/null +++ b/libs/env/context.go @@ -0,0 +1,63 @@ +package env + +import ( + "context" + "os" +) + +var envContextKey int + +func copyMap(m map[string]string) map[string]string { + out := make(map[string]string, len(m)) + for k, v := range m { + out[k] = v + } + return out +} + +func getMap(ctx context.Context) map[string]string { + if ctx == nil { + return nil + } + m, ok := ctx.Value(&envContextKey).(map[string]string) + if !ok { + return nil + } + return m +} + +func setMap(ctx context.Context, m map[string]string) context.Context { + return context.WithValue(ctx, &envContextKey, m) +} + +// Lookup key in the context or the the environment. +// Context has precedence. 
+func Lookup(ctx context.Context, key string) (string, bool) { + m := getMap(ctx) + + // Return if the key is set in the context. + v, ok := m[key] + if ok { + return v, true + } + + // Fall back to the environment. + return os.LookupEnv(key) +} + +// Get key from the context or the environment. +// Context has precedence. +func Get(ctx context.Context, key string) string { + v, _ := Lookup(ctx, key) + return v +} + +// Set key on the context. +// +// Note: this does NOT mutate the processes' actual environment variables. +// It is only visible to other code that uses this package. +func Set(ctx context.Context, key, value string) context.Context { + m := copyMap(getMap(ctx)) + m[key] = value + return setMap(ctx, m) +} diff --git a/libs/env/context_test.go b/libs/env/context_test.go new file mode 100644 index 000000000..9ff194597 --- /dev/null +++ b/libs/env/context_test.go @@ -0,0 +1,41 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestContext(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("FOO", "bar") + + ctx0 := context.Background() + + // Get + assert.Equal(t, "bar", Get(ctx0, "FOO")) + assert.Equal(t, "", Get(ctx0, "dontexist")) + + // Lookup + v, ok := Lookup(ctx0, "FOO") + assert.True(t, ok) + assert.Equal(t, "bar", v) + v, ok = Lookup(ctx0, "dontexist") + assert.False(t, ok) + assert.Equal(t, "", v) + + // Set and get new context. + // Verify that the previous context remains unchanged. + ctx1 := Set(ctx0, "FOO", "baz") + assert.Equal(t, "baz", Get(ctx1, "FOO")) + assert.Equal(t, "bar", Get(ctx0, "FOO")) + + // Set and get new context. + // Verify that the previous contexts remains unchanged. + ctx2 := Set(ctx1, "FOO", "qux") + assert.Equal(t, "qux", Get(ctx2, "FOO")) + assert.Equal(t, "baz", Get(ctx1, "FOO")) + assert.Equal(t, "bar", Get(ctx0, "FOO")) +} diff --git a/libs/env/pkg.go b/libs/env/pkg.go new file mode 100644 index 000000000..e0be7e225 --- /dev/null +++ b/libs/env/pkg.go @@ -0,0 +1,7 @@ +package env + +// The env package provides functions for working with environment variables +// and allowing for overrides via the context.Context. This is useful for +// testing where tainting a processes' environment is at odds with parallelism. +// Use of a context.Context to store variable overrides means tests can be +// parallelized without worrying about environment variable interference. diff --git a/libs/filer/dbfs_client.go b/libs/filer/dbfs_client.go index 64eb4b77e..38e8f9f3f 100644 --- a/libs/filer/dbfs_client.go +++ b/libs/filer/dbfs_client.go @@ -7,6 +7,7 @@ import ( "io/fs" "net/http" "path" + "slices" "sort" "strings" "time" @@ -14,7 +15,6 @@ import ( "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/service/files" - "golang.org/x/exp/slices" ) // Type that implements fs.DirEntry for DBFS. diff --git a/libs/filer/files_client.go b/libs/filer/files_client.go index ee7587dcc..17884d573 100644 --- a/libs/filer/files_client.go +++ b/libs/filer/files_client.go @@ -10,13 +10,13 @@ import ( "net/http" "net/url" "path" + "slices" "strings" "time" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/client" - "golang.org/x/exp/slices" ) // Type that implements fs.FileInfo for the Files API. 
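Before the remaining filer hunks, a short illustration of the new libs/env package introduced above: values set through env.Set live only on the derived context, so parallel tests can override variables without touching the real process environment. This is a standalone sketch, not part of the diff, and the key name is arbitrary:

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/cli/libs/env"
)

func main() {
	ctx := context.Background()

	// Falls back to the process environment when no override is present.
	fmt.Println(env.Get(ctx, "HOME"))

	// Set returns a derived context; os.Setenv is never called.
	ctx2 := env.Set(ctx, "EXAMPLE_KEY", "dev")
	fmt.Println(env.Get(ctx2, "EXAMPLE_KEY")) // prints "dev"

	// The parent context is unaffected by the override.
	if _, ok := env.Lookup(ctx, "EXAMPLE_KEY"); !ok {
		fmt.Println("EXAMPLE_KEY is not set here")
	}
}
```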
@@ -104,11 +104,8 @@ func (w *FilesClient) Write(ctx context.Context, name string, reader io.Reader, overwrite := slices.Contains(mode, OverwriteIfExists) urlPath = fmt.Sprintf("%s?overwrite=%t", urlPath, overwrite) - err = w.apiClient.Do(ctx, http.MethodPut, urlPath, reader, nil, - func(r *http.Request) error { - r.Header.Set("Content-Type", "application/octet-stream") - return nil - }) + headers := map[string]string{"Content-Type": "application/octet-stream"} + err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, reader, nil) // Return early on success. if err == nil { @@ -136,7 +133,7 @@ func (w *FilesClient) Read(ctx context.Context, name string) (io.ReadCloser, err } var buf bytes.Buffer - err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, &buf) + err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &buf) // Return early on success. if err == nil { @@ -168,7 +165,7 @@ func (w *FilesClient) Delete(ctx context.Context, name string, mode ...DeleteMod return CannotDeleteRootError{} } - err = w.apiClient.Do(ctx, http.MethodDelete, urlPath, nil, nil) + err = w.apiClient.Do(ctx, http.MethodDelete, urlPath, nil, nil, nil) // Return early on success. if err == nil { @@ -210,11 +207,7 @@ func (w *FilesClient) Stat(ctx context.Context, name string) (fs.FileInfo, error return nil, err } - err = w.apiClient.Do(ctx, http.MethodHead, urlPath, nil, nil, - func(r *http.Request) error { - r.Header.Del("Content-Type") - return nil - }) + err = w.apiClient.Do(ctx, http.MethodHead, urlPath, nil, nil, nil) // If the HEAD requests succeeds, the file exists. if err == nil { diff --git a/libs/filer/local_client.go b/libs/filer/local_client.go index 8d960c84b..958b6277d 100644 --- a/libs/filer/local_client.go +++ b/libs/filer/local_client.go @@ -6,8 +6,7 @@ import ( "io/fs" "os" "path/filepath" - - "golang.org/x/exp/slices" + "slices" ) // LocalClient implements the [Filer] interface for the local filesystem. diff --git a/libs/filer/slice.go b/libs/filer/slice.go index c35d6e78a..077bb305f 100644 --- a/libs/filer/slice.go +++ b/libs/filer/slice.go @@ -1,6 +1,6 @@ package filer -import "golang.org/x/exp/slices" +import "slices" // sliceWithout returns a copy of the specified slice without element e, if it is present. func sliceWithout[S []E, E comparable](s S, e E) S { diff --git a/libs/filer/workspace_files_client.go b/libs/filer/workspace_files_client.go index db06f91c2..41e35d9d1 100644 --- a/libs/filer/workspace_files_client.go +++ b/libs/filer/workspace_files_client.go @@ -11,6 +11,7 @@ import ( "net/url" "path" "regexp" + "slices" "sort" "strings" "time" @@ -19,7 +20,6 @@ import ( "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/client" "github.com/databricks/databricks-sdk-go/service/workspace" - "golang.org/x/exp/slices" ) // Type that implements fs.DirEntry for WSFS. @@ -115,7 +115,7 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io return err } - err = w.apiClient.Do(ctx, http.MethodPost, urlPath, body, nil) + err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, body, nil) // Return early on success. 
if err == nil { diff --git a/libs/fileset/fileset.go b/libs/fileset/fileset.go index 07494fe83..81b85525c 100644 --- a/libs/fileset/fileset.go +++ b/libs/fileset/fileset.go @@ -39,14 +39,14 @@ func (w *FileSet) Root() string { // Return all tracked files for Repo func (w *FileSet) All() ([]File, error) { - return w.RecursiveListFiles(w.root) + return w.recursiveListFiles() } // Recursively traverses dir in a depth first manner and returns a list of all files // that are being tracked in the FileSet (ie not being ignored for matching one of the // patterns in w.ignore) -func (w *FileSet) RecursiveListFiles(dir string) (fileList []File, err error) { - err = filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error { +func (w *FileSet) recursiveListFiles() (fileList []File, err error) { + err = filepath.WalkDir(w.root, func(path string, d fs.DirEntry, err error) error { if err != nil { return err } diff --git a/libs/fileset/glob.go b/libs/fileset/glob.go new file mode 100644 index 000000000..7a9f130bd --- /dev/null +++ b/libs/fileset/glob.go @@ -0,0 +1,49 @@ +package fileset + +import ( + "io/fs" + "os" + "path/filepath" +) + +type GlobSet struct { + root string + patterns []string +} + +func NewGlobSet(root string, includes []string) (*GlobSet, error) { + absRoot, err := filepath.Abs(root) + if err != nil { + return nil, err + } + for k := range includes { + includes[k] = filepath.Join(absRoot, filepath.FromSlash(includes[k])) + } + return &GlobSet{absRoot, includes}, nil +} + +// Return all files which matches defined glob patterns +func (s *GlobSet) All() ([]File, error) { + files := make([]File, 0) + for _, pattern := range s.patterns { + matches, err := filepath.Glob(pattern) + if err != nil { + return files, err + } + + for _, match := range matches { + matchRel, err := filepath.Rel(s.root, match) + if err != nil { + return files, err + } + + stat, err := os.Stat(match) + if err != nil { + return files, err + } + files = append(files, File{fs.FileInfoToDirEntry(stat), match, matchRel}) + } + } + + return files, nil +} diff --git a/libs/fileset/glob_test.go b/libs/fileset/glob_test.go new file mode 100644 index 000000000..f6ac7e192 --- /dev/null +++ b/libs/fileset/glob_test.go @@ -0,0 +1,65 @@ +package fileset + +import ( + "io/fs" + "os" + "path/filepath" + "slices" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestGlobFileset(t *testing.T) { + cwd, err := os.Getwd() + require.NoError(t, err) + root := filepath.Join(cwd, "..", "filer") + + entries, err := os.ReadDir(root) + require.NoError(t, err) + + g, err := NewGlobSet(root, []string{ + "./*.go", + }) + require.NoError(t, err) + + files, err := g.All() + require.NoError(t, err) + + require.Equal(t, len(files), len(entries)) + for _, f := range files { + exists := slices.ContainsFunc(entries, func(de fs.DirEntry) bool { + return de.Name() == f.Name() + }) + require.True(t, exists) + } + + g, err = NewGlobSet(root, []string{ + "./*.js", + }) + require.NoError(t, err) + + files, err = g.All() + require.NoError(t, err) + require.Equal(t, len(files), 0) +} + +func TestGlobFilesetWithRelativeRoot(t *testing.T) { + root := filepath.Join("..", "filer") + + entries, err := os.ReadDir(root) + require.NoError(t, err) + + g, err := NewGlobSet(root, []string{ + "./*.go", + }) + require.NoError(t, err) + + files, err := g.All() + require.NoError(t, err) + + require.Equal(t, len(files), len(entries)) + for _, f := range files { + require.True(t, filepath.IsAbs(f.Absolute)) + } +} diff --git 
a/libs/flags/log_level_flag.go b/libs/flags/log_level_flag.go index f5d305a50..836d84b70 100644 --- a/libs/flags/log_level_flag.go +++ b/libs/flags/log_level_flag.go @@ -2,12 +2,12 @@ package flags import ( "fmt" + "log/slog" "strings" "github.com/databricks/cli/libs/log" "github.com/spf13/cobra" "golang.org/x/exp/maps" - "golang.org/x/exp/slog" ) var levels = map[string]slog.Level{ diff --git a/libs/git/clone.go b/libs/git/clone.go new file mode 100644 index 000000000..8b075cdea --- /dev/null +++ b/libs/git/clone.go @@ -0,0 +1,72 @@ +package git + +import ( + "bytes" + "context" + "errors" + "fmt" + "os/exec" + "regexp" + "strings" +) + +// source: https://stackoverflow.com/questions/59081778/rules-for-special-characters-in-github-repository-name +var githubRepoRegex = regexp.MustCompile(`^[\w-\.]+$`) + +const githubUrl = "https://github.com" +const databricksOrg = "databricks" + +type cloneOptions struct { + // Branch or tag to clone + Reference string + + // URL for the repository + RepositoryUrl string + + // Local path to clone repository at + TargetPath string +} + +func (opts cloneOptions) args() []string { + args := []string{"clone", opts.RepositoryUrl, opts.TargetPath, "--depth=1", "--no-tags"} + if opts.Reference != "" { + args = append(args, "--branch", opts.Reference) + } + return args +} + +func Clone(ctx context.Context, url, reference, targetPath string) error { + // We assume only the repository name has been provided if the input does not contain any + // `/` characters and the URL is only made up of alphanumeric characters and + // ".", "_" and "-". This repository is resolved against the databricks GitHub account. + fullUrl := url + if githubRepoRegex.MatchString(url) { + fullUrl = strings.Join([]string{githubUrl, databricksOrg, url}, "/") + } + + opts := cloneOptions{ + Reference: reference, + RepositoryUrl: fullUrl, + TargetPath: targetPath, + } + + cmd := exec.CommandContext(ctx, "git", opts.args()...) + var cmdErr bytes.Buffer + cmd.Stderr = &cmdErr + + // start git clone + err := cmd.Start() + if errors.Is(err, exec.ErrNotFound) { + return fmt.Errorf("please install git CLI to clone a repository: %w", err) + } + if err != nil { + return fmt.Errorf("git clone failed: %w", err) + } + + // wait for git clone to complete + err = cmd.Wait() + if err != nil { + return fmt.Errorf("git clone failed: %w. %s", err, cmdErr.String()) + } + return nil +} diff --git a/libs/git/clone_test.go b/libs/git/clone_test.go new file mode 100644 index 000000000..8101178fb --- /dev/null +++ b/libs/git/clone_test.go @@ -0,0 +1,34 @@ +package git + +import ( + "context" + "os/exec" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGitCloneArgs(t *testing.T) { + // case: No branch / tag specified. In this case git clones the default branch + assert.Equal(t, []string{"clone", "abc", "/def", "--depth=1", "--no-tags"}, cloneOptions{ + Reference: "", + RepositoryUrl: "abc", + TargetPath: "/def", + }.args()) + + // case: A branch is specified.
+ assert.Equal(t, []string{"clone", "abc", "/def", "--depth=1", "--no-tags", "--branch", "my-branch"}, cloneOptions{ + Reference: "my-branch", + RepositoryUrl: "abc", + TargetPath: "/def", + }.args()) +} + +func TestGitCloneWithGitNotFound(t *testing.T) { + // We set $PATH here so the git CLI cannot be found by the clone function + t.Setenv("PATH", "") + tmpDir := t.TempDir() + + err := Clone(context.Background(), "abc", "", tmpDir) + assert.ErrorIs(t, err, exec.ErrNotFound) +} diff --git a/libs/git/fileset.go b/libs/git/fileset.go index 885a19b99..c604ac7fa 100644 --- a/libs/git/fileset.go +++ b/libs/git/fileset.go @@ -6,7 +6,7 @@ import ( // FileSet is Git repository aware implementation of [fileset.FileSet]. // It forces checking if gitignore files have been modified every -// time a call to [FileSet.All] or [FileSet.RecursiveListFiles] is made. +// time a call to [FileSet.All] is made. type FileSet struct { fileset *fileset.FileSet view *View @@ -43,11 +43,6 @@ func (f *FileSet) All() ([]fileset.File, error) { return f.fileset.All() } -func (f *FileSet) RecursiveListFiles(dir string) ([]fileset.File, error) { - f.view.repo.taintIgnoreRules() - return f.fileset.RecursiveListFiles(dir) -} - func (f *FileSet) EnsureValidGitIgnoreExists() error { return f.view.EnsureValidGitIgnoreExists() } diff --git a/libs/git/fileset_test.go b/libs/git/fileset_test.go index 4fa2ca4b2..74133f525 100644 --- a/libs/git/fileset_test.go +++ b/libs/git/fileset_test.go @@ -10,18 +10,23 @@ import ( "github.com/stretchr/testify/require" ) -func TestFileSetRecursiveListFiles(t *testing.T) { - fileSet, err := NewFileSet("./testdata") +func testFileSetAll(t *testing.T, path string) { + fileSet, err := NewFileSet(path) require.NoError(t, err) - files, err := fileSet.RecursiveListFiles("./testdata") + files, err := fileSet.All() require.NoError(t, err) - require.Len(t, files, 6) - assert.Equal(t, filepath.Join(".gitignore"), files[0].Relative) - assert.Equal(t, filepath.Join("a", ".gitignore"), files[1].Relative) - assert.Equal(t, filepath.Join("a", "b", ".gitignore"), files[2].Relative) - assert.Equal(t, filepath.Join("a", "b", "world.txt"), files[3].Relative) - assert.Equal(t, filepath.Join("a", "hello.txt"), files[4].Relative) - assert.Equal(t, filepath.Join("databricks.yml"), files[5].Relative) + require.Len(t, files, 3) + assert.Equal(t, filepath.Join("a", "b", "world.txt"), files[0].Relative) + assert.Equal(t, filepath.Join("a", "hello.txt"), files[1].Relative) + assert.Equal(t, filepath.Join("databricks.yml"), files[2].Relative) +} + +func TestFileSetListAllInRepo(t *testing.T) { + testFileSetAll(t, "./testdata") +} + +func TestFileSetListAllInTempDir(t *testing.T) { + testFileSetAll(t, copyTestdata(t, "./testdata")) } func TestFileSetNonCleanRoot(t *testing.T) { @@ -32,10 +37,10 @@ func TestFileSetNonCleanRoot(t *testing.T) { require.NoError(t, err) files, err := fileSet.All() require.NoError(t, err) - assert.Len(t, files, 6) + assert.Len(t, files, 3) } -func TestFilesetAddsCacheDirToGitIgnore(t *testing.T) { +func TestFileSetAddsCacheDirToGitIgnore(t *testing.T) { projectDir := t.TempDir() fileSet, err := NewFileSet(projectDir) require.NoError(t, err) @@ -48,7 +53,7 @@ func TestFilesetAddsCacheDirToGitIgnore(t *testing.T) { assert.Contains(t, string(fileBytes), ".databricks") } -func TestFilesetDoesNotCacheDirToGitIgnoreIfAlreadyPresent(t *testing.T) { +func TestFileSetDoesNotCacheDirToGitIgnoreIfAlreadyPresent(t *testing.T) { projectDir := t.TempDir() gitIgnorePath := filepath.Join(projectDir, ".gitignore") 
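To make the behavior of the new git.Clone helper concrete before the old libs/git/git.go implementation is deleted below: a bare name that matches githubRepoRegex is expanded to a repository under the databricks GitHub organization, a full URL is used verbatim, and an empty reference clones the default branch. A hypothetical invocation follows (the target path is made up; the repository and branch mirror the acceptance test above):

```go
package main

import (
	"context"
	"log"

	"github.com/databricks/cli/libs/git"
)

func main() {
	ctx := context.Background()

	// "notebook-best-practices" resolves to
	// https://github.com/databricks/notebook-best-practices; the clone is
	// shallow (--depth=1 --no-tags) and checks out the "dais-2022" branch.
	if err := git.Clone(ctx, "notebook-best-practices", "dais-2022", "/tmp/notebook-best-practices"); err != nil {
		log.Fatal(err)
	}
}
```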
diff --git a/libs/git/git.go b/libs/git/git.go deleted file mode 100644 index c5d09034c..000000000 --- a/libs/git/git.go +++ /dev/null @@ -1,80 +0,0 @@ -package git - -import ( - "fmt" - "net/url" - "os" - "path" - "strings" - - "github.com/databricks/cli/folders" - giturls "github.com/whilp/git-urls" - "gopkg.in/ini.v1" -) - -func Root() (string, error) { - wd, err := os.Getwd() - if err != nil { - return "", err - } - return folders.FindDirWithLeaf(wd, ".git") -} - -// Origin finds the git repository the project is cloned from, so that -// we could automatically verify if this project is checked out in repos -// home folder of the user according to recommended best practices. Can -// also be used to determine a good enough default project name. -func Origin() (*url.URL, error) { - root, err := Root() - if err != nil { - return nil, err - } - file := fmt.Sprintf("%s/.git/config", root) - gitConfig, err := ini.Load(file) - if err != nil { - return nil, err - } - section := gitConfig.Section(`remote "origin"`) - if section == nil { - return nil, fmt.Errorf("remote `origin` is not defined in %s", file) - } - url := section.Key("url") - if url == nil { - return nil, fmt.Errorf("git origin url is not defined") - } - return giturls.Parse(url.Value()) -} - -// HttpsOrigin returns URL in the format expected by Databricks Repos -// platform functionality. Gradually expand implementation to work with -// other formats of git URLs. -func HttpsOrigin() (string, error) { - origin, err := Origin() - if err != nil { - return "", err - } - // if current repo is checked out with a SSH key - if origin.Scheme != "https" { - origin.Scheme = "https" - } - // `git@` is not required for HTTPS, as Databricks Repos are checked - // out using an API token instead of username. But does it hold true - // for all of the git implementations? - if origin.User != nil { - origin.User = nil - } - // Remove `.git` suffix, if present. - origin.Path = strings.TrimSuffix(origin.Path, ".git") - return origin.String(), nil -} - -// RepositoryName returns repository name as last path entry from detected -// git repository up the tree or returns error if it fails to do so. -func RepositoryName() (string, error) { - origin, err := Origin() - if err != nil { - return "", err - } - base := path.Base(origin.Path) - return strings.TrimSuffix(base, ".git"), nil -} diff --git a/libs/git/git_test.go b/libs/git/git_test.go deleted file mode 100644 index 818ba8421..000000000 --- a/libs/git/git_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package git - -import ( - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetGitOrigin(t *testing.T) { - this, err := RepositoryName() - assert.NoError(t, err) - assert.Equal(t, "cli", this) -} - -func TestHttpsOrigin(t *testing.T) { - url, err := HttpsOrigin() - assert.NoError(t, err) - // must pass on the upcoming forks - assert.True(t, strings.HasPrefix(url, "https://github.com"), url) - assert.True(t, strings.HasSuffix(url, "cli"), url) -} diff --git a/libs/git/repository.go b/libs/git/repository.go index 3b93669ae..9c847687d 100644 --- a/libs/git/repository.go +++ b/libs/git/repository.go @@ -12,6 +12,8 @@ import ( const gitIgnoreFileName = ".gitignore" +var GitDirectoryName = ".git" + // Repository represents a Git repository or a directory // that could later be initialized as Git repository. 
type Repository struct { @@ -45,7 +47,7 @@ func (r *Repository) Root() string { func (r *Repository) CurrentBranch() (string, error) { // load .git/HEAD - ref, err := LoadReferenceFile(filepath.Join(r.rootPath, ".git", "HEAD")) + ref, err := LoadReferenceFile(filepath.Join(r.rootPath, GitDirectoryName, "HEAD")) if err != nil { return "", err } @@ -62,7 +64,7 @@ func (r *Repository) CurrentBranch() (string, error) { func (r *Repository) LatestCommit() (string, error) { // load .git/HEAD - ref, err := LoadReferenceFile(filepath.Join(r.rootPath, ".git", "HEAD")) + ref, err := LoadReferenceFile(filepath.Join(r.rootPath, GitDirectoryName, "HEAD")) if err != nil { return "", err } @@ -81,7 +83,7 @@ func (r *Repository) LatestCommit() (string, error) { if err != nil { return "", err } - branchHeadRef, err := LoadReferenceFile(filepath.Join(r.rootPath, ".git", branchHeadPath)) + branchHeadRef, err := LoadReferenceFile(filepath.Join(r.rootPath, GitDirectoryName, branchHeadPath)) if err != nil { return "", err } @@ -158,6 +160,11 @@ func (r *Repository) Ignore(relPath string) (bool, error) { trailingSlash = "/" } + // Never ignore the root path (an unnamed path) + if len(parts) == 1 && parts[0] == "." { + return false, nil + } + // Walk over path prefixes to check applicable gitignore files. for i := range parts { prefix := path.Clean(strings.Join(parts[:i], "/")) @@ -186,7 +193,7 @@ func NewRepository(path string) (*Repository, error) { } real := true - rootPath, err := folders.FindDirWithLeaf(path, ".git") + rootPath, err := folders.FindDirWithLeaf(path, GitDirectoryName) if err != nil { if !os.IsNotExist(err) { return nil, err diff --git a/libs/git/testdata/.gitignore b/libs/git/testdata/.gitignore index 3d68fc1c7..5bfc9c1e7 100644 --- a/libs/git/testdata/.gitignore +++ b/libs/git/testdata/.gitignore @@ -7,3 +7,6 @@ root.* # Directory pattern. ignoredirectory/ + +# Ignore dotfiles +.* diff --git a/libs/git/utils.go b/libs/git/utils.go index 13ce2c9e6..1d38da3aa 100644 --- a/libs/git/utils.go +++ b/libs/git/utils.go @@ -6,23 +6,23 @@ import ( giturls "github.com/whilp/git-urls" ) +// Return an origin URL as an HTTPS URL. +// The transformations in this function are not guaranteed to work for all +// Git providers. They are only guaranteed to work for GitHub. func ToHttpsUrl(url string) (string, error) { - originUrl, err := giturls.Parse(url) + origin, err := giturls.Parse(url) if err != nil { return "", err } - if originUrl.Scheme == "https" { - return originUrl.String(), nil + // If this repository is checked out over SSH + if origin.Scheme != "https" { + origin.Scheme = "https" } - // if current repo is checked out with a SSH key - if originUrl.Scheme != "https" { - originUrl.Scheme = "https" - } - // `git@` is not required for HTTPS - if originUrl.User != nil { - originUrl.User = nil + // Basic auth is not applicable for an HTTPS URL. + if origin.User != nil { + origin.User = nil } // Remove `.git` suffix, if present. 
- originUrl.Path = strings.TrimSuffix(originUrl.Path, ".git") - return originUrl.String(), nil + origin.Path = strings.TrimSuffix(origin.Path, ".git") + return origin.String(), nil } diff --git a/libs/git/utils_test.go b/libs/git/utils_test.go index 52a912da5..2a77cae16 100644 --- a/libs/git/utils_test.go +++ b/libs/git/utils_test.go @@ -7,7 +7,16 @@ import ( ) func TestToHttpsUrlForSsh(t *testing.T) { - url, err := ToHttpsUrl("user@foo.com:org/repo-name.git") - assert.NoError(t, err) - assert.Equal(t, "https://foo.com/org/repo-name", url) + for _, e := range []struct { + url string + expected string + }{ + {"user@foo.com:org/repo-name.git", "https://foo.com/org/repo-name"}, + {"git@github.com:databricks/cli.git", "https://github.com/databricks/cli"}, + {"https://github.com/databricks/cli.git", "https://github.com/databricks/cli"}, + } { + url, err := ToHttpsUrl(e.url) + assert.NoError(t, err) + assert.Equal(t, e.expected, url) + } } diff --git a/libs/git/view_test.go b/libs/git/view_test.go index 795e7b6e4..3ecd301b5 100644 --- a/libs/git/view_test.go +++ b/libs/git/view_test.go @@ -68,8 +68,15 @@ func testViewAtRoot(t *testing.T, tv testView) { assert.True(t, tv.Ignore("root/foo")) assert.True(t, tv.Ignore("root_double")) assert.False(t, tv.Ignore("newfile")) + assert.True(t, tv.Ignore(".gitignore")) + assert.False(t, tv.Ignore("newfile.py")) assert.True(t, tv.Ignore("ignoredirectory/")) + // Never ignore the root directory. + // This is the only path that may be checked as `.`, + // and would match the `.*` ignore pattern if specified. + assert.False(t, tv.Ignore(".")) + // Nested .gitignores should not affect root. assert.False(t, tv.Ignore("a.sh")) diff --git a/libs/jsonschema/extension.go b/libs/jsonschema/extension.go new file mode 100644 index 000000000..bbbde695b --- /dev/null +++ b/libs/jsonschema/extension.go @@ -0,0 +1,14 @@ +package jsonschema + +// Extension defines our custom JSON schema extensions. +// +// JSON schema supports custom extensions through vocabularies: +// https://json-schema.org/understanding-json-schema/reference/schema.html#vocabularies. +// We don't (yet?) define a meta-schema for the extensions below. +// It's not a big issue because the reach/scope of these extensions is limited. +type Extension struct { + // Order defines the order of a field with respect to other fields. + // If not defined, the field is ordered alphabetically after all fields + // that do have an order defined. + Order *int `json:"order,omitempty"` +} diff --git a/libs/jsonschema/instance.go b/libs/jsonschema/instance.go new file mode 100644 index 000000000..229a45b53 --- /dev/null +++ b/libs/jsonschema/instance.go @@ -0,0 +1,113 @@ +package jsonschema + +import ( + "encoding/json" + "fmt" + "os" + "slices" +) + +// Load a JSON document and validate it against the JSON schema. Instance here +// refers to a JSON document. see: https://json-schema.org/draft/2020-12/json-schema-core.html#name-instance +func (s *Schema) LoadInstance(path string) (map[string]any, error) { + instance := make(map[string]any) + b, err := os.ReadFile(path) + if err != nil { + return nil, err + } + err = json.Unmarshal(b, &instance) + if err != nil { + return nil, err + } + + // The default JSON unmarshaler parses untyped number values as float64. + // We convert integer properties from float64 to int64 here. 
+ for name, v := range instance { + propertySchema, ok := s.Properties[name] + if !ok { + continue + } + if propertySchema.Type != IntegerType { + continue + } + integerValue, err := toInteger(v) + if err != nil { + return nil, fmt.Errorf("failed to parse property %s: %w", name, err) + } + instance[name] = integerValue + } + return instance, s.ValidateInstance(instance) +} + +func (s *Schema) ValidateInstance(instance map[string]any) error { + for _, fn := range []func(map[string]any) error{ + s.validateAdditionalProperties, + s.validateEnum, + s.validateRequired, + s.validateTypes, + } { + err := fn(instance) + if err != nil { + return err + } + } + return nil +} + +// If additional properties is set to false, this function validates instance only +// contains properties defined in the schema. +func (s *Schema) validateAdditionalProperties(instance map[string]any) error { + // Note: AdditionalProperties has the type any. + if s.AdditionalProperties != false { + return nil + } + for k := range instance { + _, ok := s.Properties[k] + if !ok { + return fmt.Errorf("property %s is not defined in the schema", k) + } + } + return nil +} + +// This function validates that all require properties in the schema have values +// in the instance. +func (s *Schema) validateRequired(instance map[string]any) error { + for _, name := range s.Required { + if _, ok := instance[name]; !ok { + return fmt.Errorf("no value provided for required property %s", name) + } + } + return nil +} + +// Validates the types of all input properties values match their types defined in the schema +func (s *Schema) validateTypes(instance map[string]any) error { + for k, v := range instance { + fieldInfo, ok := s.Properties[k] + if !ok { + continue + } + err := validateType(v, fieldInfo.Type) + if err != nil { + return fmt.Errorf("incorrect type for property %s: %w", k, err) + } + } + return nil +} + +func (s *Schema) validateEnum(instance map[string]any) error { + for k, v := range instance { + fieldInfo, ok := s.Properties[k] + if !ok { + continue + } + if fieldInfo.Enum == nil { + continue + } + if !slices.Contains(fieldInfo.Enum, v) { + return fmt.Errorf("expected value of property %s to be one of %v. 
Found: %v", k, fieldInfo.Enum, v) + } + } + return nil +} diff --git a/libs/jsonschema/instance_test.go b/libs/jsonschema/instance_test.go new file mode 100644 index 000000000..ffd10ca43 --- /dev/null +++ b/libs/jsonschema/instance_test.go @@ -0,0 +1,155 @@ +package jsonschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidateInstanceAdditionalPropertiesPermitted(t *testing.T) { + instance := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + "an_additional_property": "abc", + } + + schema, err := Load("./testdata/instance-validate/test-schema.json") + require.NoError(t, err) + + err = schema.validateAdditionalProperties(instance) + assert.NoError(t, err) + + err = schema.ValidateInstance(instance) + assert.NoError(t, err) +} + +func TestValidateInstanceAdditionalPropertiesForbidden(t *testing.T) { + instance := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + "an_additional_property": "abc", + } + + schema, err := Load("./testdata/instance-validate/test-schema-no-additional-properties.json") + require.NoError(t, err) + + err = schema.validateAdditionalProperties(instance) + assert.EqualError(t, err, "property an_additional_property is not defined in the schema") + + err = schema.ValidateInstance(instance) + assert.EqualError(t, err, "property an_additional_property is not defined in the schema") + + instanceWOAdditionalProperties := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + } + + err = schema.validateAdditionalProperties(instanceWOAdditionalProperties) + assert.NoError(t, err) + + err = schema.ValidateInstance(instanceWOAdditionalProperties) + assert.NoError(t, err) +} + +func TestValidateInstanceTypes(t *testing.T) { + schema, err := Load("./testdata/instance-validate/test-schema.json") + require.NoError(t, err) + + validInstance := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + } + + err = schema.validateTypes(validInstance) + assert.NoError(t, err) + + err = schema.ValidateInstance(validInstance) + assert.NoError(t, err) + + invalidInstance := map[string]any{ + "int_val": "abc", + "float_val": 1.0, + "bool_val": false, + } + + err = schema.validateTypes(invalidInstance) + assert.EqualError(t, err, "incorrect type for property int_val: expected type integer, but value is \"abc\"") + + err = schema.ValidateInstance(invalidInstance) + assert.EqualError(t, err, "incorrect type for property int_val: expected type integer, but value is \"abc\"") +} + +func TestValidateInstanceRequired(t *testing.T) { + schema, err := Load("./testdata/instance-validate/test-schema-some-fields-required.json") + require.NoError(t, err) + + validInstance := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + } + err = schema.validateRequired(validInstance) + assert.NoError(t, err) + err = schema.ValidateInstance(validInstance) + assert.NoError(t, err) + + invalidInstance := map[string]any{ + "string_val": "abc", + "float_val": 1.0, + "bool_val": false, + } + err = schema.validateRequired(invalidInstance) + assert.EqualError(t, err, "no value provided for required property int_val") + err = schema.ValidateInstance(invalidInstance) + assert.EqualError(t, err, "no value provided for required property int_val") +} + +func TestLoadInstance(t *testing.T) { + schema, err := Load("./testdata/instance-validate/test-schema.json") + require.NoError(t, err) + + // Expect the instance to be loaded successfully. 
+ instance, err := schema.LoadInstance("./testdata/instance-load/valid-instance.json") + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "bool_val": false, + "int_val": int64(1), + "string_val": "abc", + "float_val": 2.0, + }, instance) + + // Expect instance validation against the schema to fail. + _, err = schema.LoadInstance("./testdata/instance-load/invalid-type-instance.json") + assert.EqualError(t, err, "incorrect type for property string_val: expected type string, but value is 123") +} + +func TestValidateInstanceEnum(t *testing.T) { + schema, err := Load("./testdata/instance-validate/test-schema-enum.json") + require.NoError(t, err) + + validInstance := map[string]any{ + "foo": "b", + "bar": int64(6), + } + assert.NoError(t, schema.validateEnum(validInstance)) + assert.NoError(t, schema.ValidateInstance(validInstance)) + + invalidStringInstance := map[string]any{ + "foo": "d", + "bar": int64(2), + } + assert.EqualError(t, schema.validateEnum(invalidStringInstance), "expected value of property foo to be one of [a b c]. Found: d") + assert.EqualError(t, schema.ValidateInstance(invalidStringInstance), "expected value of property foo to be one of [a b c]. Found: d") + + invalidIntInstance := map[string]any{ + "foo": "a", + "bar": int64(1), + } + assert.EqualError(t, schema.validateEnum(invalidIntInstance), "expected value of property bar to be one of [2 4 6]. Found: 1") + assert.EqualError(t, schema.ValidateInstance(invalidIntInstance), "expected value of property bar to be one of [2 4 6]. Found: 1") +} diff --git a/libs/jsonschema/schema.go b/libs/jsonschema/schema.go new file mode 100644 index 000000000..108102a64 --- /dev/null +++ b/libs/jsonschema/schema.go @@ -0,0 +1,155 @@ +package jsonschema + +import ( + "encoding/json" + "fmt" + "os" + "slices" +) + +// defines schema for a json object +type Schema struct { + // Type of the object + Type Type `json:"type,omitempty"` + + // Description of the object. This is rendered as inline documentation in the + // IDE. This is manually injected here using schema.Docs + Description string `json:"description,omitempty"` + + // Schemas for the fields of an struct. The keys are the first json tag. + // The values are the schema for the type of the field + Properties map[string]*Schema `json:"properties,omitempty"` + + // The schema for all values of an array + Items *Schema `json:"items,omitempty"` + + // The schema for any properties not mentioned in the Schema.Properties field. + // this validates maps[string]any in bundle configuration + // OR + // A boolean type with value false. Setting false here validates that all + // properties in the config have been defined in the json schema as properties + // + // Its type during runtime will either be *Schema or bool + AdditionalProperties any `json:"additionalProperties,omitempty"` + + // Required properties for the object. Any fields missing the "omitempty" + // json tag will be included + Required []string `json:"required,omitempty"` + + // URI to a json schema + Reference *string `json:"$ref,omitempty"` + + // Default value for the property / object + Default any `json:"default,omitempty"` + + // List of valid values for a JSON instance for this schema. + Enum []any `json:"enum,omitempty"` + + // Extension embeds our custom JSON schema extensions. 
+ Extension +} + +type Type string + +const ( + InvalidType Type = "invalid" + BooleanType Type = "boolean" + StringType Type = "string" + NumberType Type = "number" + ObjectType Type = "object" + ArrayType Type = "array" + IntegerType Type = "integer" +) + +func (schema *Schema) validate() error { + // Validate property types are all valid JSON schema types. + for _, v := range schema.Properties { + switch v.Type { + case NumberType, BooleanType, StringType, IntegerType: + continue + case "int", "int32", "int64": + return fmt.Errorf("type %s is not a recognized json schema type. Please use \"integer\" instead", v.Type) + case "float", "float32", "float64": + return fmt.Errorf("type %s is not a recognized json schema type. Please use \"number\" instead", v.Type) + case "bool": + return fmt.Errorf("type %s is not a recognized json schema type. Please use \"boolean\" instead", v.Type) + default: + return fmt.Errorf("type %s is not a recognized json schema type", v.Type) + } + } + + // Validate default property values are consistent with types. + for name, property := range schema.Properties { + if property.Default == nil { + continue + } + if err := validateType(property.Default, property.Type); err != nil { + return fmt.Errorf("type validation for default value of property %s failed: %w", name, err) + } + } + + // Validate enum field values for properties are consistent with types. + for name, property := range schema.Properties { + if property.Enum == nil { + continue + } + for i, enum := range property.Enum { + err := validateType(enum, property.Type) + if err != nil { + return fmt.Errorf("type validation for enum at index %v failed for property %s: %w", i, name, err) + } + } + } + + // Validate default value is contained in the list of enums if both are defined. + for name, property := range schema.Properties { + if property.Default == nil || property.Enum == nil { + continue + } + // We expect the default value to be consistent with the list of enum + // values. + if !slices.Contains(property.Enum, property.Default) { + return fmt.Errorf("list of enum values for property %s does not contain default value %v: %v", name, property.Default, property.Enum) + } + } + return nil +} + +func Load(path string) (*Schema, error) { + b, err := os.ReadFile(path) + if err != nil { + return nil, err + } + schema := &Schema{} + err = json.Unmarshal(b, schema) + if err != nil { + return nil, err + } + + // Convert the default values of top-level properties to integers. + // This is required because the default JSON unmarshaler parses numbers + // as floats when the Golang field it's being loaded to is untyped. + // + // NOTE: properties can be recursively defined in a schema, but the current + // use-cases only uses the first layer of properties so we skip converting + // any recursive properties. 
+ for name, property := range schema.Properties { + if property.Type != IntegerType { + continue + } + if property.Default != nil { + property.Default, err = toInteger(property.Default) + if err != nil { + return nil, fmt.Errorf("failed to parse default value for property %s: %w", name, err) + } + } + for i, enum := range property.Enum { + property.Enum[i], err = toInteger(enum) + if err != nil { + return nil, fmt.Errorf("failed to parse enum value %v at index %v for property %s: %w", enum, i, name, err) + } + } + } + + return schema, schema.validate() +} diff --git a/libs/jsonschema/schema_order.go b/libs/jsonschema/schema_order.go new file mode 100644 index 000000000..3bc3e7d00 --- /dev/null +++ b/libs/jsonschema/schema_order.go @@ -0,0 +1,57 @@ +package jsonschema + +import ( + "slices" + "strings" +) + +// Property defines a single property of a struct schema. +// This type is not used in the schema itself but rather to +// return the pair of a property name and its schema. +type Property struct { + Name string + Schema *Schema +} + +// OrderedProperties returns the properties of the schema ordered according +// to the value of their `order` extension. If this extension is not set, the +// properties are ordered alphabetically. +func (s *Schema) OrderedProperties() []Property { + order := make(map[string]*int) + out := make([]Property, 0, len(s.Properties)) + for key, property := range s.Properties { + order[key] = property.Order + out = append(out, Property{ + Name: key, + Schema: property, + }) + } + + // Sort the properties by order and then by name. + slices.SortFunc(out, func(a, b Property) int { + oa := order[a.Name] + ob := order[b.Name] + cmp := 0 + switch { + case oa != nil && ob != nil: + // Compare the order values if both are set. + cmp = *oa - *ob + case oa == nil && ob != nil: + // If only one is set, the one that is set comes first. + cmp = 1 + case oa != nil && ob == nil: + // If only one is set, the one that is set comes first. + cmp = -1 + } + + // If we have a non-zero comparison, return it. + if cmp != 0 { + return cmp + } + + // If the order is the same, compare by name. + return strings.Compare(a.Name, b.Name) + }) + + return out +} diff --git a/libs/jsonschema/schema_order_test.go b/libs/jsonschema/schema_order_test.go new file mode 100644 index 000000000..56d4d6355 --- /dev/null +++ b/libs/jsonschema/schema_order_test.go @@ -0,0 +1,60 @@ +package jsonschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestOrderedProperties(t *testing.T) { + newInt := func(i int) *int { + return &i + } + + s := Schema{ + Properties: map[string]*Schema{ + "bbb": { + Type: StringType, + }, + "ccc": { + Type: StringType, + }, + "ddd": { + Type: StringType, + }, + "zzz1": { + Type: StringType, + Extension: Extension{ + Order: newInt(-1), + }, + }, + "zzz2": { + Type: StringType, + Extension: Extension{ + Order: newInt(-2), + }, + }, + "aaa1": { + Type: StringType, + Extension: Extension{ + Order: newInt(1), + }, + }, + "aaa2": { + Type: StringType, + Extension: Extension{ + Order: newInt(2), + }, + }, + }, + } + + // Test that the properties are ordered by order and then by name. 
+ properties := s.OrderedProperties() + names := make([]string, len(properties)) + for i, property := range properties { + names[i] = property.Name + } + + assert.Equal(t, []string{"zzz2", "zzz1", "aaa1", "aaa2", "bbb", "ccc", "ddd"}, names) +} diff --git a/libs/jsonschema/schema_test.go b/libs/jsonschema/schema_test.go new file mode 100644 index 000000000..db559ea88 --- /dev/null +++ b/libs/jsonschema/schema_test.go @@ -0,0 +1,141 @@ +package jsonschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaValidateTypeNames(t *testing.T) { + var err error + toSchema := func(s string) *Schema { + return &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: Type(s), + }, + }, + } + } + + err = toSchema("string").validate() + assert.NoError(t, err) + + err = toSchema("boolean").validate() + assert.NoError(t, err) + + err = toSchema("number").validate() + assert.NoError(t, err) + + err = toSchema("integer").validate() + assert.NoError(t, err) + + err = toSchema("int").validate() + assert.EqualError(t, err, "type int is not a recognized json schema type. Please use \"integer\" instead") + + err = toSchema("float").validate() + assert.EqualError(t, err, "type float is not a recognized json schema type. Please use \"number\" instead") + + err = toSchema("bool").validate() + assert.EqualError(t, err, "type bool is not a recognized json schema type. Please use \"boolean\" instead") + + err = toSchema("foobar").validate() + assert.EqualError(t, err, "type foobar is not a recognized json schema type") +} + +func TestSchemaLoadIntegers(t *testing.T) { + schema, err := Load("./testdata/schema-load-int/schema-valid.json") + assert.NoError(t, err) + assert.Equal(t, int64(1), schema.Properties["abc"].Default) + assert.Equal(t, []any{int64(1), int64(2), int64(3)}, schema.Properties["abc"].Enum) +} + +func TestSchemaLoadIntegersWithInvalidDefault(t *testing.T) { + _, err := Load("./testdata/schema-load-int/schema-invalid-default.json") + assert.EqualError(t, err, "failed to parse default value for property abc: expected integer value, got: 1.1") +} + +func TestSchemaLoadIntegersWithInvalidEnums(t *testing.T) { + _, err := Load("./testdata/schema-load-int/schema-invalid-enum.json") + assert.EqualError(t, err, "failed to parse enum value 2.4 at index 1 for property abc: expected integer value, got: 2.4") +} + +func TestSchemaValidateDefaultType(t *testing.T) { + invalidSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "number", + Default: "abc", + }, + }, + } + + err := invalidSchema.validate() + assert.EqualError(t, err, "type validation for default value of property foo failed: expected type float, but value is \"abc\"") + + validSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "boolean", + Default: true, + }, + }, + } + + err = validSchema.validate() + assert.NoError(t, err) +} + +func TestSchemaValidateEnumType(t *testing.T) { + invalidSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "boolean", + Enum: []any{true, "false"}, + }, + }, + } + + err := invalidSchema.validate() + assert.EqualError(t, err, "type validation for enum at index 1 failed for property foo: expected type boolean, but value is \"false\"") + + validSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "boolean", + Enum: []any{true, false}, + }, + }, + } + + err = validSchema.validate() + assert.NoError(t, err) +} + +func TestSchemaValidateErrorWhenDefaultValueIsNotInEnums(t *testing.T) { + invalidSchema := 
&Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "string", + Default: "abc", + Enum: []any{"def", "ghi"}, + }, + }, + } + + err := invalidSchema.validate() + assert.EqualError(t, err, "list of enum values for property foo does not contain default value abc: [def ghi]") + + validSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "string", + Default: "abc", + Enum: []any{"def", "ghi", "abc"}, + }, + }, + } + + err = validSchema.validate() + assert.NoError(t, err) +} diff --git a/libs/jsonschema/testdata/instance-load/invalid-type-instance.json b/libs/jsonschema/testdata/instance-load/invalid-type-instance.json new file mode 100644 index 000000000..c55b6fccb --- /dev/null +++ b/libs/jsonschema/testdata/instance-load/invalid-type-instance.json @@ -0,0 +1,6 @@ +{ + "int_val": 1, + "bool_val": false, + "string_val": 123, + "float_val": 3.0 +} diff --git a/libs/jsonschema/testdata/instance-load/valid-instance.json b/libs/jsonschema/testdata/instance-load/valid-instance.json new file mode 100644 index 000000000..7d4dc818a --- /dev/null +++ b/libs/jsonschema/testdata/instance-load/valid-instance.json @@ -0,0 +1,6 @@ +{ + "int_val": 1, + "bool_val": false, + "string_val": "abc", + "float_val": 2.0 +} diff --git a/libs/jsonschema/testdata/instance-validate/test-schema-enum.json b/libs/jsonschema/testdata/instance-validate/test-schema-enum.json new file mode 100644 index 000000000..75ffd6eb8 --- /dev/null +++ b/libs/jsonschema/testdata/instance-validate/test-schema-enum.json @@ -0,0 +1,12 @@ +{ + "properties": { + "foo": { + "type": "string", + "enum": ["a", "b", "c"] + }, + "bar": { + "type": "integer", + "enum": [2,4,6] + } + } +} diff --git a/libs/jsonschema/testdata/instance-validate/test-schema-no-additional-properties.json b/libs/jsonschema/testdata/instance-validate/test-schema-no-additional-properties.json new file mode 100644 index 000000000..98b19d5a4 --- /dev/null +++ b/libs/jsonschema/testdata/instance-validate/test-schema-no-additional-properties.json @@ -0,0 +1,19 @@ +{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string", + "default": "abc" + } + }, + "additionalProperties": false +} diff --git a/libs/jsonschema/testdata/instance-validate/test-schema-some-fields-required.json b/libs/jsonschema/testdata/instance-validate/test-schema-some-fields-required.json new file mode 100644 index 000000000..465811034 --- /dev/null +++ b/libs/jsonschema/testdata/instance-validate/test-schema-some-fields-required.json @@ -0,0 +1,19 @@ +{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string", + "default": "abc" + } + }, + "required": ["int_val", "float_val", "bool_val"] +} diff --git a/libs/jsonschema/testdata/instance-validate/test-schema.json b/libs/jsonschema/testdata/instance-validate/test-schema.json new file mode 100644 index 000000000..41eb82519 --- /dev/null +++ b/libs/jsonschema/testdata/instance-validate/test-schema.json @@ -0,0 +1,18 @@ +{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string", + "default": "abc" + } + } +} diff --git a/libs/jsonschema/testdata/schema-load-int/schema-invalid-default.json 
b/libs/jsonschema/testdata/schema-load-int/schema-invalid-default.json new file mode 100644 index 000000000..1e709f622 --- /dev/null +++ b/libs/jsonschema/testdata/schema-load-int/schema-invalid-default.json @@ -0,0 +1,9 @@ +{ + "type": "object", + "properties": { + "abc": { + "type": "integer", + "default": 1.1 + } + } +} diff --git a/libs/jsonschema/testdata/schema-load-int/schema-invalid-enum.json b/libs/jsonschema/testdata/schema-load-int/schema-invalid-enum.json new file mode 100644 index 000000000..5bd2b3f2b --- /dev/null +++ b/libs/jsonschema/testdata/schema-load-int/schema-invalid-enum.json @@ -0,0 +1,10 @@ +{ + "type": "object", + "properties": { + "abc": { + "type": "integer", + "default": 1, + "enum": [1,2.4,3] + } + } +} diff --git a/libs/jsonschema/testdata/schema-load-int/schema-valid.json b/libs/jsonschema/testdata/schema-load-int/schema-valid.json new file mode 100644 index 000000000..a1167a6c9 --- /dev/null +++ b/libs/jsonschema/testdata/schema-load-int/schema-valid.json @@ -0,0 +1,10 @@ +{ + "type": "object", + "properties": { + "abc": { + "type": "integer", + "default": 1, + "enum": [1,2,3] + } + } +} diff --git a/libs/jsonschema/utils.go b/libs/jsonschema/utils.go new file mode 100644 index 000000000..66db9603e --- /dev/null +++ b/libs/jsonschema/utils.go @@ -0,0 +1,113 @@ +package jsonschema + +import ( + "errors" + "fmt" + "strconv" +) + +// function to check whether a float value represents an integer +func isIntegerValue(v float64) bool { + return v == float64(int64(v)) +} + +func toInteger(v any) (int64, error) { + switch typedVal := v.(type) { + // cast float to int + case float32: + if !isIntegerValue(float64(typedVal)) { + return 0, fmt.Errorf("expected integer value, got: %v", v) + } + return int64(typedVal), nil + case float64: + if !isIntegerValue(typedVal) { + return 0, fmt.Errorf("expected integer value, got: %v", v) + } + return int64(typedVal), nil + + // pass through common integer cases + case int: + return int64(typedVal), nil + case int32: + return int64(typedVal), nil + case int64: + return typedVal, nil + + default: + return 0, fmt.Errorf("cannot convert %#v to an integer", v) + } +} + +func ToString(v any, T Type) (string, error) { + switch T { + case BooleanType: + boolVal, ok := v.(bool) + if !ok { + return "", fmt.Errorf("expected bool, got: %#v", v) + } + return strconv.FormatBool(boolVal), nil + case StringType: + strVal, ok := v.(string) + if !ok { + return "", fmt.Errorf("expected string, got: %#v", v) + } + return strVal, nil + case NumberType: + floatVal, ok := v.(float64) + if !ok { + return "", fmt.Errorf("expected float, got: %#v", v) + } + return strconv.FormatFloat(floatVal, 'f', -1, 64), nil + case IntegerType: + intVal, err := toInteger(v) + if err != nil { + return "", err + } + return strconv.FormatInt(intVal, 10), nil + case ArrayType, ObjectType: + return "", fmt.Errorf("cannot format object of type %s as a string. 
Value of object: %#v", T, v) + default: + return "", fmt.Errorf("unknown json schema type: %q", T) + } +} + +func ToStringSlice(arr []any, T Type) ([]string, error) { + res := []string{} + for _, v := range arr { + s, err := ToString(v, T) + if err != nil { + return nil, err + } + res = append(res, s) + } + return res, nil +} + +func FromString(s string, T Type) (any, error) { + if T == StringType { + return s, nil + } + + // Variables to store value and error from parsing + var v any + var err error + + switch T { + case BooleanType: + v, err = strconv.ParseBool(s) + case NumberType: + v, err = strconv.ParseFloat(s, 32) + case IntegerType: + v, err = strconv.ParseInt(s, 10, 64) + case ArrayType, ObjectType: + return "", fmt.Errorf("cannot parse string as object of type %s. Value of string: %q", T, s) + default: + return "", fmt.Errorf("unknown json schema type: %q", T) + } + + // Return more readable error incase of a syntax error + if errors.Is(err, strconv.ErrSyntax) { + return nil, fmt.Errorf("could not parse %q as a %s: %w", s, T, err) + } + return v, err +} diff --git a/libs/jsonschema/utils_test.go b/libs/jsonschema/utils_test.go new file mode 100644 index 000000000..29529aaa9 --- /dev/null +++ b/libs/jsonschema/utils_test.go @@ -0,0 +1,130 @@ +package jsonschema + +import ( + "math" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTemplateIsInteger(t *testing.T) { + assert.False(t, isIntegerValue(1.1)) + assert.False(t, isIntegerValue(0.1)) + assert.False(t, isIntegerValue(-0.1)) + + assert.True(t, isIntegerValue(-1.0)) + assert.True(t, isIntegerValue(0.0)) + assert.True(t, isIntegerValue(2.0)) +} + +func TestTemplateToInteger(t *testing.T) { + v, err := toInteger(float32(2)) + assert.NoError(t, err) + assert.Equal(t, int64(2), v) + + v, err = toInteger(float64(4)) + assert.NoError(t, err) + assert.Equal(t, int64(4), v) + + v, err = toInteger(float64(4)) + assert.NoError(t, err) + assert.Equal(t, int64(4), v) + + v, err = toInteger(float64(math.MaxInt32 + 10)) + assert.NoError(t, err) + assert.Equal(t, int64(2147483657), v) + + v, err = toInteger(2) + assert.NoError(t, err) + assert.Equal(t, int64(2), v) + + _, err = toInteger(float32(2.2)) + assert.EqualError(t, err, "expected integer value, got: 2.2") + + _, err = toInteger(float64(math.MaxInt32 + 100.1)) + assert.ErrorContains(t, err, "expected integer value, got: 2.1474837471e+09") + + _, err = toInteger("abcd") + assert.EqualError(t, err, "cannot convert \"abcd\" to an integer") +} + +func TestTemplateToString(t *testing.T) { + s, err := ToString(true, BooleanType) + assert.NoError(t, err) + assert.Equal(t, "true", s) + + s, err = ToString("abc", StringType) + assert.NoError(t, err) + assert.Equal(t, "abc", s) + + s, err = ToString(1.1, NumberType) + assert.NoError(t, err) + assert.Equal(t, "1.1", s) + + s, err = ToString(2, IntegerType) + assert.NoError(t, err) + assert.Equal(t, "2", s) + + _, err = ToString([]string{}, ArrayType) + assert.EqualError(t, err, "cannot format object of type array as a string. 
Value of object: []string{}") + + _, err = ToString("true", BooleanType) + assert.EqualError(t, err, "expected bool, got: \"true\"") + + _, err = ToString(123, StringType) + assert.EqualError(t, err, "expected string, got: 123") + + _, err = ToString(false, NumberType) + assert.EqualError(t, err, "expected float, got: false") + + _, err = ToString("abc", IntegerType) + assert.EqualError(t, err, "cannot convert \"abc\" to an integer") + + _, err = ToString("abc", "foobar") + assert.EqualError(t, err, "unknown json schema type: \"foobar\"") +} + +func TestTemplateFromString(t *testing.T) { + v, err := FromString("true", BooleanType) + assert.NoError(t, err) + assert.Equal(t, true, v) + + v, err = FromString("abc", StringType) + assert.NoError(t, err) + assert.Equal(t, "abc", v) + + v, err = FromString("1.1", NumberType) + assert.NoError(t, err) + // Floating point conversions are not perfect + assert.True(t, (v.(float64)-1.1) < 0.000001) + + v, err = FromString("12345", IntegerType) + assert.NoError(t, err) + assert.Equal(t, int64(12345), v) + + v, err = FromString("123", NumberType) + assert.NoError(t, err) + assert.Equal(t, float64(123), v) + + _, err = FromString("qrt", ArrayType) + assert.EqualError(t, err, "cannot parse string as object of type array. Value of string: \"qrt\"") + + _, err = FromString("abc", IntegerType) + assert.EqualError(t, err, "could not parse \"abc\" as a integer: strconv.ParseInt: parsing \"abc\": invalid syntax") + + _, err = FromString("1.0", IntegerType) + assert.EqualError(t, err, "could not parse \"1.0\" as a integer: strconv.ParseInt: parsing \"1.0\": invalid syntax") + + _, err = FromString("1.0", "foobar") + assert.EqualError(t, err, "unknown json schema type: \"foobar\"") +} + +func TestTemplateToStringSlice(t *testing.T) { + s, err := ToStringSlice([]any{"a", "b", "c"}, StringType) + assert.NoError(t, err) + assert.Equal(t, []string{"a", "b", "c"}, s) + + s, err = ToStringSlice([]any{1.1, 2.2, 3.3}, NumberType) + assert.NoError(t, err) + assert.Equal(t, []string{"1.1", "2.2", "3.3"}, s) +} diff --git a/libs/jsonschema/validate_type.go b/libs/jsonschema/validate_type.go new file mode 100644 index 000000000..125d6b20b --- /dev/null +++ b/libs/jsonschema/validate_type.go @@ -0,0 +1,56 @@ +package jsonschema + +import ( + "fmt" + "reflect" + "slices" +) + +type validateTypeFunc func(v any) error + +func validateType(v any, fieldType Type) error { + validateFunc, ok := validateTypeFuncs[fieldType] + if !ok { + return nil + } + return validateFunc(v) +} + +func validateString(v any) error { + if _, ok := v.(string); !ok { + return fmt.Errorf("expected type string, but value is %#v", v) + } + return nil +} + +func validateBoolean(v any) error { + if _, ok := v.(bool); !ok { + return fmt.Errorf("expected type boolean, but value is %#v", v) + } + return nil +} + +func validateNumber(v any) error { + if !slices.Contains([]reflect.Kind{reflect.Float32, reflect.Float64}, + reflect.TypeOf(v).Kind()) { + return fmt.Errorf("expected type float, but value is %#v", v) + } + return nil +} + +func validateInteger(v any) error { + if !slices.Contains([]reflect.Kind{reflect.Int, reflect.Int8, reflect.Int16, + reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, + reflect.Uint32, reflect.Uint64}, + reflect.TypeOf(v).Kind()) { + return fmt.Errorf("expected type integer, but value is %#v", v) + } + return nil +} + +var validateTypeFuncs map[Type]validateTypeFunc = map[Type]validateTypeFunc{ + StringType: validateString, + BooleanType: validateBoolean, + 
IntegerType: validateInteger, + NumberType: validateNumber, +} diff --git a/libs/jsonschema/validate_type_test.go b/libs/jsonschema/validate_type_test.go new file mode 100644 index 000000000..36d9e5758 --- /dev/null +++ b/libs/jsonschema/validate_type_test.go @@ -0,0 +1,128 @@ +package jsonschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestValidatorString(t *testing.T) { + err := validateString("abc") + assert.NoError(t, err) + + err = validateString(1) + assert.ErrorContains(t, err, "expected type string, but value is 1") + + err = validateString(true) + assert.ErrorContains(t, err, "expected type string, but value is true") + + err = validateString("false") + assert.NoError(t, err) +} + +func TestValidatorBoolean(t *testing.T) { + err := validateBoolean(true) + assert.NoError(t, err) + + err = validateBoolean(1) + assert.ErrorContains(t, err, "expected type boolean, but value is 1") + + err = validateBoolean("abc") + assert.ErrorContains(t, err, "expected type boolean, but value is \"abc\"") + + err = validateBoolean("false") + assert.ErrorContains(t, err, "expected type boolean, but value is \"false\"") +} + +func TestValidatorNumber(t *testing.T) { + err := validateNumber(true) + assert.ErrorContains(t, err, "expected type float, but value is true") + + err = validateNumber(int32(1)) + assert.ErrorContains(t, err, "expected type float, but value is 1") + + err = validateNumber(int64(2)) + assert.ErrorContains(t, err, "expected type float, but value is 2") + + err = validateNumber(float32(1)) + assert.NoError(t, err) + + err = validateNumber(float64(1)) + assert.NoError(t, err) + + err = validateNumber("abc") + assert.ErrorContains(t, err, "expected type float, but value is \"abc\"") +} + +func TestValidatorInt(t *testing.T) { + err := validateInteger(true) + assert.ErrorContains(t, err, "expected type integer, but value is true") + + err = validateInteger(int32(1)) + assert.NoError(t, err) + + err = validateInteger(int64(1)) + assert.NoError(t, err) + + err = validateInteger(float32(1)) + assert.ErrorContains(t, err, "expected type integer, but value is 1") + + err = validateInteger(float64(1)) + assert.ErrorContains(t, err, "expected type integer, but value is 1") + + err = validateInteger("abc") + assert.ErrorContains(t, err, "expected type integer, but value is \"abc\"") +} + +func TestTemplateValidateType(t *testing.T) { + // assert validation passing + err := validateType(int(0), IntegerType) + assert.NoError(t, err) + err = validateType(int32(1), IntegerType) + assert.NoError(t, err) + err = validateType(int64(1), IntegerType) + assert.NoError(t, err) + + err = validateType(float32(1.1), NumberType) + assert.NoError(t, err) + err = validateType(float64(1.2), NumberType) + assert.NoError(t, err) + + err = validateType(false, BooleanType) + assert.NoError(t, err) + + err = validateType("abc", StringType) + assert.NoError(t, err) + + // assert validation failing for integers + err = validateType(float64(1.2), IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is 1.2") + err = validateType(true, IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is true") + err = validateType("abc", IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is \"abc\"") + + // assert validation failing for floats + err = validateType(true, NumberType) + assert.ErrorContains(t, err, "expected type float, but value is true") + err = validateType("abc", NumberType) + assert.ErrorContains(t, err, 
"expected type float, but value is \"abc\"") + err = validateType(int(1), NumberType) + assert.ErrorContains(t, err, "expected type float, but value is 1") + + // assert validation failing for boolean + err = validateType(int(1), BooleanType) + assert.ErrorContains(t, err, "expected type boolean, but value is 1") + err = validateType(float64(1), BooleanType) + assert.ErrorContains(t, err, "expected type boolean, but value is 1") + err = validateType("abc", BooleanType) + assert.ErrorContains(t, err, "expected type boolean, but value is \"abc\"") + + // assert validation failing for string + err = validateType(int(1), StringType) + assert.ErrorContains(t, err, "expected type string, but value is 1") + err = validateType(float64(1), StringType) + assert.ErrorContains(t, err, "expected type string, but value is 1") + err = validateType(false, StringType) + assert.ErrorContains(t, err, "expected type string, but value is false") +} diff --git a/libs/locker/locker.go b/libs/locker/locker.go index bb95b7840..b0d65c42e 100644 --- a/libs/locker/locker.go +++ b/libs/locker/locker.go @@ -8,12 +8,12 @@ import ( "fmt" "io" "io/fs" + "slices" "time" "github.com/databricks/cli/libs/filer" "github.com/databricks/databricks-sdk-go" "github.com/google/uuid" - "golang.org/x/exp/slices" ) type UnlockOption int @@ -105,10 +105,10 @@ func (locker *Locker) assertLockHeld(ctx context.Context) error { return err } if activeLockState.ID != locker.State.ID && !activeLockState.IsForced { - return fmt.Errorf("deploy lock acquired by %s at %v. Use --force to override", activeLockState.User, activeLockState.AcquisitionTime) + return fmt.Errorf("deploy lock acquired by %s at %v. Use --force-lock to override", activeLockState.User, activeLockState.AcquisitionTime) } if activeLockState.ID != locker.State.ID && activeLockState.IsForced { - return fmt.Errorf("deploy lock force acquired by %s at %v. Use --force to override", activeLockState.User, activeLockState.AcquisitionTime) + return fmt.Errorf("deploy lock force acquired by %s at %v. 
Use --force-lock to override", activeLockState.User, activeLockState.AcquisitionTime) } return nil } diff --git a/libs/log/context.go b/libs/log/context.go index 7ed1d292e..d9e31d116 100644 --- a/libs/log/context.go +++ b/libs/log/context.go @@ -3,7 +3,7 @@ package log import ( "context" - "golang.org/x/exp/slog" + "log/slog" ) type logger int diff --git a/libs/log/levels.go b/libs/log/levels.go index f6277cf3f..cdb5a1e1e 100644 --- a/libs/log/levels.go +++ b/libs/log/levels.go @@ -1,6 +1,6 @@ package log -import "golang.org/x/exp/slog" +import "log/slog" const ( LevelTrace slog.Level = -8 diff --git a/libs/log/logger.go b/libs/log/logger.go index 80d8782cf..43a30e92b 100644 --- a/libs/log/logger.go +++ b/libs/log/logger.go @@ -6,7 +6,7 @@ import ( "runtime" "time" - "golang.org/x/exp/slog" + "log/slog" ) // GetLogger returns either the logger configured on the context, diff --git a/libs/log/replace_attr.go b/libs/log/replace_attr.go index 55d2c15ff..b71e47d23 100644 --- a/libs/log/replace_attr.go +++ b/libs/log/replace_attr.go @@ -1,6 +1,6 @@ package log -import "golang.org/x/exp/slog" +import "log/slog" type ReplaceAttrFunction func(groups []string, a slog.Attr) slog.Attr diff --git a/libs/log/replace_attr_test.go b/libs/log/replace_attr_test.go index dce11be18..afedeaa61 100644 --- a/libs/log/replace_attr_test.go +++ b/libs/log/replace_attr_test.go @@ -1,10 +1,10 @@ package log import ( + "log/slog" "testing" "github.com/stretchr/testify/assert" - "golang.org/x/exp/slog" ) func testReplaceA(groups []string, a slog.Attr) slog.Attr { diff --git a/libs/log/sdk.go b/libs/log/sdk.go index 975f83aa7..e1b1ffed4 100644 --- a/libs/log/sdk.go +++ b/libs/log/sdk.go @@ -6,8 +6,9 @@ import ( "runtime" "time" + "log/slog" + sdk "github.com/databricks/databricks-sdk-go/logger" - "golang.org/x/exp/slog" ) // slogAdapter makes an slog.Logger usable with the Databricks SDK. diff --git a/libs/log/source.go b/libs/log/source.go index 4a30aaab3..d0fd30dc5 100644 --- a/libs/log/source.go +++ b/libs/log/source.go @@ -1,9 +1,8 @@ package log import ( + "log/slog" "path/filepath" - - "golang.org/x/exp/slog" ) // ReplaceSourceAttr rewrites the source attribute to include only the file's basename. 
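Note on the libs/log changes above: they swap golang.org/x/exp/slog for the standard library's log/slog (available since Go 1.21). As a minimal, illustrative sketch that is not part of this diff, downstream logger construction only needs the import path updated, since the stdlib API matches the experimental package for these call sites; the handler options and message below are assumptions for demonstration:

package main

import (
	"log/slog" // previously: "golang.org/x/exp/slog"
	"os"
)

func main() {
	// The exp/slog API was upstreamed largely unchanged, so existing
	// call sites keep working once the import path is updated.
	logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
		Level: slog.LevelDebug,
	}))
	logger.Info("sync complete", "files", 8)
}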
diff --git a/libs/log/source_test.go b/libs/log/source_test.go index 010aad5ab..5c587af66 100644 --- a/libs/log/source_test.go +++ b/libs/log/source_test.go @@ -1,10 +1,10 @@ package log import ( + "log/slog" "testing" "github.com/stretchr/testify/assert" - "golang.org/x/exp/slog" ) func TestReplaceSourceAttrSourceKey(t *testing.T) { diff --git a/libs/set/set.go b/libs/set/set.go new file mode 100644 index 000000000..4798ed092 --- /dev/null +++ b/libs/set/set.go @@ -0,0 +1,75 @@ +package set + +import ( + "fmt" + + "golang.org/x/exp/maps" +) + +type hashFunc[T any] func(a T) string + +// Set struct represents set data structure +type Set[T any] struct { + key hashFunc[T] + data map[string]T +} + +// NewSetFromF initialise a new set with initial values and a hash function +// to define uniqueness of value +func NewSetFromF[T any](values []T, f hashFunc[T]) *Set[T] { + s := &Set[T]{ + key: f, + data: make(map[string]T), + } + + for _, v := range values { + s.Add(v) + } + + return s +} + +// NewSetF initialise a new empty and a hash function +// to define uniqueness of value +func NewSetF[T any](f hashFunc[T]) *Set[T] { + return NewSetFromF([]T{}, f) +} + +// NewSetFrom initialise a new set with initial values which are comparable +func NewSetFrom[T comparable](values []T) *Set[T] { + return NewSetFromF(values, func(item T) string { + return fmt.Sprintf("%v", item) + }) +} + +// NewSetFrom initialise a new empty set for comparable values +func NewSet[T comparable]() *Set[T] { + return NewSetFrom([]T{}) +} + +func (s *Set[T]) addOne(item T) { + s.data[s.key(item)] = item +} + +// Add one or multiple items to set +func (s *Set[T]) Add(items ...T) { + for _, i := range items { + s.addOne(i) + } +} + +// Remove an item from set. No-op if the item does not exist +func (s *Set[T]) Remove(item T) { + delete(s.data, s.key(item)) +} + +// Indicates if the item exists in the set +func (s *Set[T]) Has(item T) bool { + _, ok := s.data[s.key(item)] + return ok +} + +// Returns an iterable slice of values from set +func (s *Set[T]) Iter() []T { + return maps.Values(s.data) +} diff --git a/libs/set/set_test.go b/libs/set/set_test.go new file mode 100644 index 000000000..c2b6e25c8 --- /dev/null +++ b/libs/set/set_test.go @@ -0,0 +1,111 @@ +package set + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSet(t *testing.T) { + s := NewSetFrom([]string{}) + require.ElementsMatch(t, []string{}, s.Iter()) + + s = NewSetFrom([]string{"a", "a", "a", "b", "b", "c", "d", "e"}) + require.ElementsMatch(t, []string{"a", "b", "c", "d", "e"}, s.Iter()) + + i := NewSetFrom([]int{1, 1, 2, 3, 4, 5, 7, 7, 7, 10, 11}) + require.ElementsMatch(t, []int{1, 2, 3, 4, 5, 7, 10, 11}, i.Iter()) + + f := NewSetFrom([]float32{1.1, 1.1, 2.0, 3.1, 4.5, 5.1, 7.1, 7.2, 7.1, 10.1, 11.0}) + require.ElementsMatch(t, []float32{1.1, 2.0, 3.1, 4.5, 5.1, 7.1, 7.2, 10.1, 11.0}, f.Iter()) +} + +type testStruct struct { + key string + value int +} + +func TestSetCustomKey(t *testing.T) { + s := NewSetF(func(item *testStruct) string { + return fmt.Sprintf("%s:%d", item.key, item.value) + }) + s.Add(&testStruct{"a", 1}) + s.Add(&testStruct{"b", 2}) + s.Add(&testStruct{"c", 1}) + s.Add(&testStruct{"a", 1}) + s.Add(&testStruct{"a", 1}) + s.Add(&testStruct{"a", 1}) + s.Add(&testStruct{"c", 1}) + s.Add(&testStruct{"c", 3}) + + require.ElementsMatch(t, []*testStruct{ + {"a", 1}, + {"b", 2}, + {"c", 1}, + {"c", 3}, + }, s.Iter()) +} + +func TestSetAdd(t *testing.T) { + s := NewSet[string]() + s.Add("a") + s.Add("a") + 
s.Add("a") + s.Add("b") + s.Add("c") + s.Add("c") + s.Add("d") + s.Add("d") + + require.ElementsMatch(t, []string{"a", "b", "c", "d"}, s.Iter()) +} + +func TestSetRemove(t *testing.T) { + s := NewSet[string]() + s.Add("a") + s.Add("a") + s.Add("a") + s.Add("b") + s.Add("c") + s.Add("c") + s.Add("d") + s.Add("d") + + s.Remove("d") + s.Remove("d") + s.Remove("a") + + require.ElementsMatch(t, []string{"b", "c"}, s.Iter()) +} + +func TestSetHas(t *testing.T) { + s := NewSet[string]() + require.False(t, s.Has("a")) + + s.Add("a") + require.True(t, s.Has("a")) + + s.Add("a") + s.Add("a") + require.True(t, s.Has("a")) + + s.Add("b") + s.Add("c") + s.Add("c") + s.Add("d") + s.Add("d") + + require.True(t, s.Has("a")) + require.True(t, s.Has("b")) + require.True(t, s.Has("c")) + require.True(t, s.Has("d")) + + s.Remove("d") + s.Remove("a") + + require.False(t, s.Has("a")) + require.True(t, s.Has("b")) + require.True(t, s.Has("c")) + require.False(t, s.Has("d")) +} diff --git a/libs/sync/path.go b/libs/sync/path.go index 7fd1b9a97..97a908965 100644 --- a/libs/sync/path.go +++ b/libs/sync/path.go @@ -13,42 +13,6 @@ import ( "github.com/databricks/databricks-sdk-go/service/workspace" ) -// Return if the child path is nested under the parent path. -func isPathNestedUnder(child, parent string) bool { - child = path.Clean(child) - parent = path.Clean(parent) - - // Traverse up the tree as long as "child" is contained in "parent". - for len(child) > len(parent) && strings.HasPrefix(child, parent) { - child = path.Dir(child) - if child == parent { - return true - } - } - return false -} - -// Check if the specified path is nested under one of the allowed base paths. -func checkPathNestedUnderBasePaths(me *iam.User, p string) error { - validBasePaths := []string{ - path.Clean(fmt.Sprintf("/Users/%s", me.UserName)), - path.Clean(fmt.Sprintf("/Repos/%s", me.UserName)), - } - - givenBasePath := path.Clean(p) - match := false - for _, basePath := range validBasePaths { - if isPathNestedUnder(givenBasePath, basePath) { - match = true - break - } - } - if !match { - return fmt.Errorf("path must be nested under %s", strings.Join(validBasePaths, " or ")) - } - return nil -} - func repoPathForPath(me *iam.User, remotePath string) string { base := path.Clean(fmt.Sprintf("/Repos/%s", me.UserName)) remotePath = path.Clean(remotePath) @@ -60,15 +24,16 @@ func repoPathForPath(me *iam.User, remotePath string) string { // EnsureRemotePathIsUsable checks if the specified path is nested under // expected base paths and if it is a directory or repository. -func EnsureRemotePathIsUsable(ctx context.Context, wsc *databricks.WorkspaceClient, remotePath string) error { - me, err := wsc.CurrentUser.Me(ctx) - if err != nil { - return err - } +func EnsureRemotePathIsUsable(ctx context.Context, wsc *databricks.WorkspaceClient, remotePath string, me *iam.User) error { + var err error - err = checkPathNestedUnderBasePaths(me, remotePath) - if err != nil { - return err + // TODO: we should cache CurrentUser.Me at the SDK level + // for now we let clients pass in any existing user they might already have + if me == nil { + me, err = wsc.CurrentUser.Me(ctx) + if err != nil { + return err + } } // Ensure that the remote path exists. 
diff --git a/libs/sync/path_test.go b/libs/sync/path_test.go index 18475c926..2d492251f 100644 --- a/libs/sync/path_test.go +++ b/libs/sync/path_test.go @@ -7,37 +7,6 @@ import ( "github.com/stretchr/testify/assert" ) -func TestPathNestedUnderBasePaths(t *testing.T) { - me := iam.User{ - UserName: "jane@doe.com", - } - - // Not nested under allowed base paths. - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/.")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/..")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/john@doe.com")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.comsuffix/foo")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/.")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/..")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/john@doe.com")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.comsuffix/foo")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/")) - - // Nested under allowed base paths. - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/foo")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/./foo")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/foo/bar/qux")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/foo")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/./foo")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/foo/bar/qux")) -} - func TestPathToRepoPath(t *testing.T) { me := iam.User{ UserName: "jane@doe.com", diff --git a/libs/sync/sync.go b/libs/sync/sync.go index 5c4c9d8f6..8be478fc3 100644 --- a/libs/sync/sync.go +++ b/libs/sync/sync.go @@ -6,14 +6,19 @@ import ( "time" "github.com/databricks/cli/libs/filer" + "github.com/databricks/cli/libs/fileset" "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/log" + "github.com/databricks/cli/libs/set" "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/iam" ) type SyncOptions struct { LocalPath string RemotePath string + Include []string + Exclude []string Full bool @@ -23,13 +28,18 @@ type SyncOptions struct { WorkspaceClient *databricks.WorkspaceClient + CurrentUser *iam.User + Host string } type Sync struct { *SyncOptions - fileSet *git.FileSet + fileSet *git.FileSet + includeFileSet *fileset.GlobSet + excludeFileSet *fileset.GlobSet + snapshot *Snapshot filer filer.Filer @@ -49,8 +59,18 @@ func New(ctx context.Context, opts SyncOptions) (*Sync, error) { return nil, err } + includeFileSet, err := fileset.NewGlobSet(opts.LocalPath, opts.Include) + if err != nil { + return nil, err + } + + excludeFileSet, err := fileset.NewGlobSet(opts.LocalPath, opts.Exclude) + if err != nil { + return nil, err + } + // Verify that the remote path we're about to synchronize to is valid and allowed. 
- err = EnsureRemotePathIsUsable(ctx, opts.WorkspaceClient, opts.RemotePath) + err = EnsureRemotePathIsUsable(ctx, opts.WorkspaceClient, opts.RemotePath, opts.CurrentUser) if err != nil { return nil, err } @@ -85,11 +105,13 @@ func New(ctx context.Context, opts SyncOptions) (*Sync, error) { return &Sync{ SyncOptions: &opts, - fileSet: fileSet, - snapshot: snapshot, - filer: filer, - notifier: &NopNotifier{}, - seq: 0, + fileSet: fileSet, + includeFileSet: includeFileSet, + excludeFileSet: excludeFileSet, + snapshot: snapshot, + filer: filer, + notifier: &NopNotifier{}, + seq: 0, }, nil } @@ -129,15 +151,12 @@ func (s *Sync) notifyComplete(ctx context.Context, d diff) { } func (s *Sync) RunOnce(ctx context.Context) error { - // tradeoff: doing portable monitoring only due to macOS max descriptor manual ulimit setting requirement - // https://github.com/gorakhargosh/watchdog/blob/master/src/watchdog/observers/kqueue.py#L394-L418 - all, err := s.fileSet.All() + files, err := getFileList(ctx, s) if err != nil { - log.Errorf(ctx, "cannot list files: %s", err) return err } - change, err := s.snapshot.diff(ctx, all) + change, err := s.snapshot.diff(ctx, files) if err != nil { return err } @@ -163,6 +182,40 @@ func (s *Sync) RunOnce(ctx context.Context) error { return nil } +func getFileList(ctx context.Context, s *Sync) ([]fileset.File, error) { + // tradeoff: doing portable monitoring only due to macOS max descriptor manual ulimit setting requirement + // https://github.com/gorakhargosh/watchdog/blob/master/src/watchdog/observers/kqueue.py#L394-L418 + all := set.NewSetF(func(f fileset.File) string { + return f.Absolute + }) + gitFiles, err := s.fileSet.All() + if err != nil { + log.Errorf(ctx, "cannot list files: %s", err) + return nil, err + } + all.Add(gitFiles...) + + include, err := s.includeFileSet.All() + if err != nil { + log.Errorf(ctx, "cannot list include files: %s", err) + return nil, err + } + + all.Add(include...) 
+ + exclude, err := s.excludeFileSet.All() + if err != nil { + log.Errorf(ctx, "cannot list exclude files: %s", err) + return nil, err + } + + for _, f := range exclude { + all.Remove(f) + } + + return all.Iter(), nil +} + func (s *Sync) DestroySnapshot(ctx context.Context) error { return s.snapshot.Destroy(ctx) } diff --git a/libs/sync/sync_test.go b/libs/sync/sync_test.go new file mode 100644 index 000000000..99c7e04b1 --- /dev/null +++ b/libs/sync/sync_test.go @@ -0,0 +1,119 @@ +package sync + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/libs/fileset" + "github.com/databricks/cli/libs/git" + "github.com/stretchr/testify/require" +) + +func createFile(dir string, name string) error { + f, err := os.Create(filepath.Join(dir, name)) + if err != nil { + return err + } + + return f.Close() +} + +func setupFiles(t *testing.T) string { + dir := t.TempDir() + + err := createFile(dir, "a.go") + require.NoError(t, err) + + err = createFile(dir, "b.go") + require.NoError(t, err) + + err = createFile(dir, "ab.go") + require.NoError(t, err) + + err = createFile(dir, "abc.go") + require.NoError(t, err) + + err = createFile(dir, "c.go") + require.NoError(t, err) + + err = createFile(dir, "d.go") + require.NoError(t, err) + + dbDir := filepath.Join(dir, ".databricks") + err = os.Mkdir(dbDir, 0755) + require.NoError(t, err) + + err = createFile(dbDir, "e.go") + require.NoError(t, err) + + return dir + +} + +func TestGetFileSet(t *testing.T) { + ctx := context.Background() + + dir := setupFiles(t) + fileSet, err := git.NewFileSet(dir) + require.NoError(t, err) + + err = fileSet.EnsureValidGitIgnoreExists() + require.NoError(t, err) + + inc, err := fileset.NewGlobSet(dir, []string{}) + require.NoError(t, err) + + excl, err := fileset.NewGlobSet(dir, []string{}) + require.NoError(t, err) + + s := &Sync{ + SyncOptions: &SyncOptions{}, + + fileSet: fileSet, + includeFileSet: inc, + excludeFileSet: excl, + } + + fileList, err := getFileList(ctx, s) + require.NoError(t, err) + require.Equal(t, len(fileList), 7) + + inc, err = fileset.NewGlobSet(dir, []string{}) + require.NoError(t, err) + + excl, err = fileset.NewGlobSet(dir, []string{"*.go"}) + require.NoError(t, err) + + s = &Sync{ + SyncOptions: &SyncOptions{}, + + fileSet: fileSet, + includeFileSet: inc, + excludeFileSet: excl, + } + + fileList, err = getFileList(ctx, s) + require.NoError(t, err) + require.Equal(t, len(fileList), 1) + + inc, err = fileset.NewGlobSet(dir, []string{".databricks/*.*"}) + require.NoError(t, err) + + excl, err = fileset.NewGlobSet(dir, []string{}) + require.NoError(t, err) + + s = &Sync{ + SyncOptions: &SyncOptions{}, + + fileSet: fileSet, + includeFileSet: inc, + excludeFileSet: excl, + } + + fileList, err = getFileList(ctx, s) + require.NoError(t, err) + require.Equal(t, len(fileList), 8) + +} diff --git a/libs/template/config.go b/libs/template/config.go new file mode 100644 index 000000000..21618ac9a --- /dev/null +++ b/libs/template/config.go @@ -0,0 +1,151 @@ +package template + +import ( + "context" + "fmt" + + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/jsonschema" + "golang.org/x/exp/maps" +) + +type config struct { + ctx context.Context + values map[string]any + schema *jsonschema.Schema +} + +func newConfig(ctx context.Context, schemaPath string) (*config, error) { + // Read config schema + schema, err := jsonschema.Load(schemaPath) + if err != nil { + return nil, err + } + if err := validateSchema(schema); err != nil { + return nil, err + } + 
+ // Do not allow template input variables that are not defined in the schema. + schema.AdditionalProperties = false + + // Return config + return &config{ + ctx: ctx, + schema: schema, + values: make(map[string]any, 0), + }, nil +} + +func validateSchema(schema *jsonschema.Schema) error { + for _, v := range schema.Properties { + if v.Type == jsonschema.ArrayType || v.Type == jsonschema.ObjectType { + return fmt.Errorf("property type %s is not supported by bundle templates", v.Type) + } + } + return nil +} + +// Reads json file at path and assigns values from the file +func (c *config) assignValuesFromFile(path string) error { + // Load the config file. + configFromFile, err := c.schema.LoadInstance(path) + if err != nil { + return fmt.Errorf("failed to load config from file %s: %w", path, err) + } + + // Write configs from the file to the input map, not overwriting any existing + // configurations. + for name, val := range configFromFile { + if _, ok := c.values[name]; ok { + continue + } + c.values[name] = val + } + return nil +} + +// Assigns default values from schema to input config map +func (c *config) assignDefaultValues() error { + for name, property := range c.schema.Properties { + // Config already has a value assigned + if _, ok := c.values[name]; ok { + continue + } + // No default value defined for the property + if property.Default == nil { + continue + } + c.values[name] = property.Default + } + return nil +} + +// Prompts user for values for properties that do not have a value set yet +func (c *config) promptForValues() error { + for _, p := range c.schema.OrderedProperties() { + name := p.Name + property := p.Schema + + // Config already has a value assigned + if _, ok := c.values[name]; ok { + continue + } + + // Compute default value to display by converting it to a string + var defaultVal string + var err error + if property.Default != nil { + defaultVal, err = jsonschema.ToString(property.Default, property.Type) + if err != nil { + return err + } + } + + // Get user input by running the prompt + var userInput string + if property.Enum != nil { + // convert list of enums to string slice + enums, err := jsonschema.ToStringSlice(property.Enum, property.Type) + if err != nil { + return err + } + userInput, err = cmdio.AskSelect(c.ctx, property.Description, enums) + if err != nil { + return err + } + } else { + userInput, err = cmdio.Ask(c.ctx, property.Description, defaultVal) + if err != nil { + return err + } + + } + + // Convert user input string back to a value + c.values[name], err = jsonschema.FromString(userInput, property.Type) + if err != nil { + return err + } + } + return nil +} + +// Prompt user for any missing config values. Assign default values if +// terminal is not TTY +func (c *config) promptOrAssignDefaultValues() error { + if cmdio.IsOutTTY(c.ctx) && cmdio.IsInTTY(c.ctx) { + return c.promptForValues() + } + return c.assignDefaultValues() +} + +// Validates the configuration. If passes, the configuration is ready to be used +// to initialize the template. +func (c *config) validate() error { + // All properties in the JSON schema should have a value defined. + c.schema.Required = maps.Keys(c.schema.Properties) + if err := c.schema.ValidateInstance(c.values); err != nil { + return fmt.Errorf("validation for template input parameters failed. 
%w", err) + } + return nil +} diff --git a/libs/template/config_test.go b/libs/template/config_test.go new file mode 100644 index 000000000..1b1fc3383 --- /dev/null +++ b/libs/template/config_test.go @@ -0,0 +1,171 @@ +package template + +import ( + "context" + "testing" + + "github.com/databricks/cli/libs/jsonschema" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testConfig(t *testing.T) *config { + c, err := newConfig(context.Background(), "./testdata/config-test-schema/test-schema.json") + require.NoError(t, err) + return c +} + +func TestTemplateConfigAssignValuesFromFile(t *testing.T) { + c := testConfig(t) + + err := c.assignValuesFromFile("./testdata/config-assign-from-file/config.json") + assert.NoError(t, err) + + assert.Equal(t, int64(1), c.values["int_val"]) + assert.Equal(t, float64(2), c.values["float_val"]) + assert.Equal(t, true, c.values["bool_val"]) + assert.Equal(t, "hello", c.values["string_val"]) +} + +func TestTemplateConfigAssignValuesFromFileForInvalidIntegerValue(t *testing.T) { + c := testConfig(t) + + err := c.assignValuesFromFile("./testdata/config-assign-from-file-invalid-int/config.json") + assert.EqualError(t, err, "failed to load config from file ./testdata/config-assign-from-file-invalid-int/config.json: failed to parse property int_val: cannot convert \"abc\" to an integer") +} + +func TestTemplateConfigAssignValuesFromFileDoesNotOverwriteExistingConfigs(t *testing.T) { + c := testConfig(t) + c.values = map[string]any{ + "string_val": "this-is-not-overwritten", + } + + err := c.assignValuesFromFile("./testdata/config-assign-from-file/config.json") + assert.NoError(t, err) + + assert.Equal(t, int64(1), c.values["int_val"]) + assert.Equal(t, float64(2), c.values["float_val"]) + assert.Equal(t, true, c.values["bool_val"]) + assert.Equal(t, "this-is-not-overwritten", c.values["string_val"]) +} + +func TestTemplateConfigAssignDefaultValues(t *testing.T) { + c := testConfig(t) + + err := c.assignDefaultValues() + assert.NoError(t, err) + + assert.Len(t, c.values, 2) + assert.Equal(t, "abc", c.values["string_val"]) + assert.Equal(t, int64(123), c.values["int_val"]) +} + +func TestTemplateConfigValidateValuesDefined(t *testing.T) { + c := testConfig(t) + c.values = map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + } + + err := c.validate() + assert.EqualError(t, err, "validation for template input parameters failed. no value provided for required property string_val") +} + +func TestTemplateConfigValidateTypeForValidConfig(t *testing.T) { + c := testConfig(t) + c.values = map[string]any{ + "int_val": 1, + "float_val": 1.1, + "bool_val": true, + "string_val": "abcd", + } + + err := c.validate() + assert.NoError(t, err) +} + +func TestTemplateConfigValidateTypeForUnknownField(t *testing.T) { + c := testConfig(t) + c.values = map[string]any{ + "unknown_prop": 1, + "int_val": 1, + "float_val": 1.1, + "bool_val": true, + "string_val": "abcd", + } + + err := c.validate() + assert.EqualError(t, err, "validation for template input parameters failed. property unknown_prop is not defined in the schema") +} + +func TestTemplateConfigValidateTypeForInvalidType(t *testing.T) { + c := testConfig(t) + c.values = map[string]any{ + "int_val": "this-should-be-an-int", + "float_val": 1.1, + "bool_val": true, + "string_val": "abcd", + } + + err := c.validate() + assert.EqualError(t, err, "validation for template input parameters failed. 
incorrect type for property int_val: expected type integer, but value is \"this-should-be-an-int\"") +} + +func TestTemplateValidateSchema(t *testing.T) { + var err error + toSchema := func(s string) *jsonschema.Schema { + return &jsonschema.Schema{ + Properties: map[string]*jsonschema.Schema{ + "foo": { + Type: jsonschema.Type(s), + }, + }, + } + } + + err = validateSchema(toSchema("string")) + assert.NoError(t, err) + + err = validateSchema(toSchema("boolean")) + assert.NoError(t, err) + + err = validateSchema(toSchema("number")) + assert.NoError(t, err) + + err = validateSchema(toSchema("integer")) + assert.NoError(t, err) + + err = validateSchema(toSchema("object")) + assert.EqualError(t, err, "property type object is not supported by bundle templates") + + err = validateSchema(toSchema("array")) + assert.EqualError(t, err, "property type array is not supported by bundle templates") +} + +func TestTemplateEnumValidation(t *testing.T) { + schema := jsonschema.Schema{ + Properties: map[string]*jsonschema.Schema{ + "abc": { + Type: "integer", + Enum: []any{1, 2, 3, 4}, + }, + }, + } + + c := &config{ + schema: &schema, + values: map[string]any{ + "abc": 5, + }, + } + assert.EqualError(t, c.validate(), "validation for template input parameters failed. expected value of property abc to be one of [1 2 3 4]. Found: 5") + + c = &config{ + schema: &schema, + values: map[string]any{ + "abc": 4, + }, + } + assert.NoError(t, c.validate()) +} diff --git a/libs/template/file.go b/libs/template/file.go new file mode 100644 index 000000000..aafb1acfa --- /dev/null +++ b/libs/template/file.go @@ -0,0 +1,101 @@ +package template + +import ( + "context" + "io" + "io/fs" + "os" + "path/filepath" + + "github.com/databricks/cli/libs/filer" +) + +// Interface representing a file to be materialized from a template into a project +// instance +type file interface { + // Destination path for file. This is where the file will be created when + // PersistToDisk is called. + DstPath() *destinationPath + + // Write file to disk at the destination path. + PersistToDisk() error +} + +type destinationPath struct { + // Root path for the project instance. This path uses the system's default + // file separator. For example /foo/bar on Unix and C:\foo\bar on windows + root string + + // Unix like file path relative to the "root" of the instantiated project. Is used to + // evaluate whether the file should be skipped by comparing it to a list of + // skip glob patterns. + relPath string +} + +// Absolute path of the file, in the os native format. For example /foo/bar on +// Unix and C:\foo\bar on windows +func (f *destinationPath) absPath() string { + return filepath.Join(f.root, filepath.FromSlash(f.relPath)) +} + +type copyFile struct { + ctx context.Context + + // Permissions bits for the destination file + perm fs.FileMode + + dstPath *destinationPath + + // Filer rooted at template root. Used to read srcPath. + srcFiler filer.Filer + + // Relative path from template root for file to be copied. 
+ srcPath string +} + +func (f *copyFile) DstPath() *destinationPath { + return f.dstPath +} + +func (f *copyFile) PersistToDisk() error { + path := f.DstPath().absPath() + err := os.MkdirAll(filepath.Dir(path), 0755) + if err != nil { + return err + } + srcFile, err := f.srcFiler.Read(f.ctx, f.srcPath) + if err != nil { + return err + } + defer srcFile.Close() + dstFile, err := os.OpenFile(path, os.O_CREATE|os.O_EXCL|os.O_WRONLY, f.perm) + if err != nil { + return err + } + defer dstFile.Close() + _, err = io.Copy(dstFile, srcFile) + return err +} + +type inMemoryFile struct { + dstPath *destinationPath + + content []byte + + // Permissions bits for the destination file + perm fs.FileMode +} + +func (f *inMemoryFile) DstPath() *destinationPath { + return f.dstPath +} + +func (f *inMemoryFile) PersistToDisk() error { + path := f.DstPath().absPath() + + err := os.MkdirAll(filepath.Dir(path), 0755) + if err != nil { + return err + } + return os.WriteFile(path, f.content, f.perm) +} diff --git a/libs/template/file_test.go b/libs/template/file_test.go new file mode 100644 index 000000000..85938895e --- /dev/null +++ b/libs/template/file_test.go @@ -0,0 +1,111 @@ +package template + +import ( + "context" + "io/fs" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/databricks/cli/libs/filer" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testInMemoryFile(t *testing.T, perm fs.FileMode) { + tmpDir := t.TempDir() + + f := &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a/b/c", + }, + perm: perm, + content: []byte("123"), + } + err := f.PersistToDisk() + assert.NoError(t, err) + + assertFileContent(t, filepath.Join(tmpDir, "a/b/c"), "123") + assertFilePermissions(t, filepath.Join(tmpDir, "a/b/c"), perm) +} + +func testCopyFile(t *testing.T, perm fs.FileMode) { + tmpDir := t.TempDir() + + templateFiler, err := filer.NewLocalClient(tmpDir) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(tmpDir, "source"), []byte("qwerty"), perm) + require.NoError(t, err) + + f := ©File{ + ctx: context.Background(), + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a/b/c", + }, + perm: perm, + srcPath: "source", + srcFiler: templateFiler, + } + err = f.PersistToDisk() + assert.NoError(t, err) + + assertFileContent(t, filepath.Join(tmpDir, "a/b/c"), "qwerty") + assertFilePermissions(t, filepath.Join(tmpDir, "a/b/c"), perm) +} + +func TestTemplateFileDestinationPath(t *testing.T) { + if runtime.GOOS == "windows" { + t.SkipNow() + } + f := &destinationPath{ + root: `a/b/c`, + relPath: "d/e", + } + assert.Equal(t, `a/b/c/d/e`, f.absPath()) +} + +func TestTemplateFileDestinationPathForWindows(t *testing.T) { + if runtime.GOOS != "windows" { + t.SkipNow() + } + f := &destinationPath{ + root: `c:\a\b\c`, + relPath: "d/e", + } + assert.Equal(t, `c:\a\b\c\d\e`, f.absPath()) +} + +func TestTemplateInMemoryFilePersistToDisk(t *testing.T) { + if runtime.GOOS == "windows" { + t.SkipNow() + } + testInMemoryFile(t, 0755) +} + +func TestTemplateInMemoryFilePersistToDiskForWindows(t *testing.T) { + if runtime.GOOS != "windows" { + t.SkipNow() + } + // we have separate tests for windows because of differences in valid + // fs.FileMode values we can use for different operating systems. 
+ testInMemoryFile(t, 0666) +} + +func TestTemplateCopyFilePersistToDisk(t *testing.T) { + if runtime.GOOS == "windows" { + t.SkipNow() + } + testCopyFile(t, 0644) +} + +func TestTemplateCopyFilePersistToDiskForWindows(t *testing.T) { + if runtime.GOOS != "windows" { + t.SkipNow() + } + // we have separate tests for windows because of differences in valid + // fs.FileMode values we can use for different operating systems. + testCopyFile(t, 0666) +} diff --git a/libs/template/helpers.go b/libs/template/helpers.go new file mode 100644 index 000000000..317522703 --- /dev/null +++ b/libs/template/helpers.go @@ -0,0 +1,113 @@ +package template + +import ( + "context" + "errors" + "fmt" + "net/url" + "regexp" + "text/template" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/auth" + "github.com/databricks/databricks-sdk-go/service/iam" +) + +type ErrFail struct { + msg string +} + +func (err ErrFail) Error() string { + return err.msg +} + +type pair struct { + k string + v any +} + +var cachedUser *iam.User +var cachedIsServicePrincipal *bool + +func loadHelpers(ctx context.Context) template.FuncMap { + w := root.WorkspaceClient(ctx) + return template.FuncMap{ + "fail": func(format string, args ...any) (any, error) { + return nil, ErrFail{fmt.Sprintf(format, args...)} + }, + // Alias for https://pkg.go.dev/net/url#Parse. Allows usage of all methods of url.URL + "url": func(rawUrl string) (*url.URL, error) { + return url.Parse(rawUrl) + }, + // Alias for https://pkg.go.dev/regexp#Compile. Allows usage of all methods of regexp.Regexp + "regexp": func(expr string) (*regexp.Regexp, error) { + return regexp.Compile(expr) + }, + // A key value pair. This is used with the map function to generate maps + // to use inside a template + "pair": func(k string, v any) pair { + return pair{k, v} + }, + // map converts a list of pairs to a map object. This is useful to pass multiple + // objects to templates defined in the library directory. Go text template + // syntax for invoking a template only allows specifying a single argument, + // this function can be used to workaround that limitation. 
+ // + // For example: {{template "my_template" (map (pair "foo" $arg1) (pair "bar" $arg2))}} + // $arg1 and $arg2 can be referred from inside "my_template" as ".foo" and ".bar" + "map": func(pairs ...pair) map[string]any { + result := make(map[string]any, 0) + for _, p := range pairs { + result[p.k] = p.v + } + return result + }, + // Get smallest node type (follows Terraform's GetSmallestNodeType) + "smallest_node_type": func() (string, error) { + if w.Config.Host == "" { + return "", errors.New("cannot determine target workspace, please first setup a configuration profile using 'databricks auth login'") + } + if w.Config.IsAzure() { + return "Standard_D3_v2", nil + } else if w.Config.IsGcp() { + return "n1-standard-4", nil + } + return "i3.xlarge", nil + }, + "workspace_host": func() (string, error) { + if w.Config.Host == "" { + return "", errors.New("cannot determine target workspace, please first setup a configuration profile using 'databricks auth login'") + } + return w.Config.Host, nil + }, + "user_name": func() (string, error) { + if cachedUser == nil { + var err error + cachedUser, err = w.CurrentUser.Me(ctx) + if err != nil { + return "", err + } + } + result := cachedUser.UserName + if result == "" { + result = cachedUser.Id + } + return result, nil + }, + "is_service_principal": func() (bool, error) { + if cachedIsServicePrincipal != nil { + return *cachedIsServicePrincipal, nil + } + if cachedUser == nil { + var err error + cachedUser, err = w.CurrentUser.Me(ctx) + if err != nil { + return false, err + } + } + result := auth.IsServicePrincipal(cachedUser.Id) + cachedIsServicePrincipal = &result + return result, nil + }, + } +} diff --git a/libs/template/helpers_test.go b/libs/template/helpers_test.go new file mode 100644 index 000000000..d495ae895 --- /dev/null +++ b/libs/template/helpers_test.go @@ -0,0 +1,131 @@ +package template + +import ( + "context" + "os" + "strings" + "testing" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go" + workspaceConfig "github.com/databricks/databricks-sdk-go/config" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestTemplatePrintStringWithoutProcessing(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/print-without-processing/template", "./testdata/print-without-processing/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + cleanContent := strings.Trim(string(r.files[0].(*inMemoryFile).content), "\n\r") + assert.Equal(t, `{{ fail "abc" }}`, cleanContent) +} + +func TestTemplateRegexpCompileFunction(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/regexp-compile/template", "./testdata/regexp-compile/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + content := string(r.files[0].(*inMemoryFile).content) + assert.Contains(t, content, "0:food") + assert.Contains(t, content, "1:fool") +} + +func TestTemplateUrlFunction(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) 
+ r, err := newRenderer(ctx, nil, helpers, "./testdata/urlparse-function/template", "./testdata/urlparse-function/library", tmpDir) + + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + assert.Equal(t, "https://www.databricks.com", string(r.files[0].(*inMemoryFile).content)) +} + +func TestTemplateMapPairFunction(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/map-pair/template", "./testdata/map-pair/library", tmpDir) + + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + assert.Equal(t, "false 123 hello 12.3", string(r.files[0].(*inMemoryFile).content)) +} + +func TestWorkspaceHost(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + w := &databricks.WorkspaceClient{ + Config: &workspaceConfig.Config{ + Host: "https://myhost.com", + }, + } + ctx = root.SetWorkspaceClient(ctx, w) + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/workspace-host/template", "./testdata/map-pair/library", tmpDir) + + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + assert.Contains(t, string(r.files[0].(*inMemoryFile).content), "https://myhost.com") + assert.Contains(t, string(r.files[0].(*inMemoryFile).content), "i3.xlarge") + +} + +func TestWorkspaceHostNotConfigured(t *testing.T) { + ctx := context.Background() + cmd := cmdio.NewIO(flags.OutputJSON, strings.NewReader(""), os.Stdout, os.Stderr, "template") + ctx = cmdio.InContext(ctx, cmd) + tmpDir := t.TempDir() + + w := &databricks.WorkspaceClient{ + Config: &workspaceConfig.Config{}, + } + ctx = root.SetWorkspaceClient(ctx, w) + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/workspace-host/template", "./testdata/map-pair/library", tmpDir) + + assert.NoError(t, err) + + err = r.walk() + require.ErrorContains(t, err, "cannot determine target workspace") + +} diff --git a/libs/template/materialize.go b/libs/template/materialize.go new file mode 100644 index 000000000..8517858fd --- /dev/null +++ b/libs/template/materialize.go @@ -0,0 +1,122 @@ +package template + +import ( + "context" + "embed" + "io/fs" + "os" + "path" + "path/filepath" + + "github.com/databricks/cli/libs/cmdio" +) + +const libraryDirName = "library" +const templateDirName = "template" +const schemaFileName = "databricks_template_schema.json" + +//go:embed all:templates +var builtinTemplates embed.FS + +// This function materializes the input templates as a project, using user defined +// configurations. +// Parameters: +// +// ctx: context containing a cmdio object. 
This is used to prompt the user
+// configFilePath: file path containing user defined config values
+// templateRoot: root of the template definition
+// outputDir: root of directory where to initialize the template
+func Materialize(ctx context.Context, configFilePath, templateRoot, outputDir string) error {
+	// Use a temporary directory in case any builtin templates like default-python are used
+	tempDir, err := os.MkdirTemp("", "templates")
+	defer os.RemoveAll(tempDir)
+	if err != nil {
+		return err
+	}
+	templateRoot, err = prepareBuiltinTemplates(templateRoot, tempDir)
+	if err != nil {
+		return err
+	}
+
+	templatePath := filepath.Join(templateRoot, templateDirName)
+	libraryPath := filepath.Join(templateRoot, libraryDirName)
+	schemaPath := filepath.Join(templateRoot, schemaFileName)
+	helpers := loadHelpers(ctx)
+
+	config, err := newConfig(ctx, schemaPath)
+	if err != nil {
+		return err
+	}
+
+	// Read and assign config values from file
+	if configFilePath != "" {
+		err = config.assignValuesFromFile(configFilePath)
+		if err != nil {
+			return err
+		}
+	}
+
+	// Prompt user for any missing config values. Assign default values if
+	// terminal is not TTY
+	err = config.promptOrAssignDefaultValues()
+	if err != nil {
+		return err
+	}
+
+	err = config.validate()
+	if err != nil {
+		return err
+	}
+
+	// Walk and render the template, since input configuration is complete
+	r, err := newRenderer(ctx, config.values, helpers, templatePath, libraryPath, outputDir)
+	if err != nil {
+		return err
+	}
+	err = r.walk()
+	if err != nil {
+		return err
+	}
+
+	err = r.persistToDisk()
+	if err != nil {
+		return err
+	}
+	cmdio.LogString(ctx, "✨ Successfully initialized template")
+	return nil
+}
+
+// If the given templateRoot matches the name of a built-in template, copy that template into tempDir and return the copy's path; otherwise return templateRoot unchanged.
+func prepareBuiltinTemplates(templateRoot string, tempDir string) (string, error) {
+	_, err := fs.Stat(builtinTemplates, path.Join("templates", templateRoot))
+	if err != nil {
+		// The given path doesn't appear to be using our built-in templates
+		return templateRoot, nil
+	}
+
+	// We have a built-in template with the same name as templateRoot!
+	// Now we need to make a full copy of the builtin templates to a real file system
+	// since template.Parse() doesn't support embed.FS.
+	err = fs.WalkDir(builtinTemplates, "templates", func(path string, entry fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+
+		targetPath := filepath.Join(tempDir, path)
+		if entry.IsDir() {
+			return os.Mkdir(targetPath, 0755)
+		} else {
+			content, err := fs.ReadFile(builtinTemplates, path)
+			if err != nil {
+				return err
+			}
+			return os.WriteFile(targetPath, content, 0644)
+		}
+	})
+
+	if err != nil {
+		return "", err
+	}
+
+	return filepath.Join(tempDir, "templates", templateRoot), nil
+}
diff --git a/libs/template/renderer.go b/libs/template/renderer.go
new file mode 100644
index 000000000..f674ea0fb
--- /dev/null
+++ b/libs/template/renderer.go
@@ -0,0 +1,321 @@
+package template
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"io"
+	"os"
+	"path"
+	"path/filepath"
+	"slices"
+	"sort"
+	"strings"
+	"text/template"
+
+	"github.com/databricks/cli/libs/filer"
+	"github.com/databricks/cli/libs/log"
+	"github.com/databricks/databricks-sdk-go/logger"
+)
+
+const templateExtension = ".tmpl"
+
+// Renders a databricks template as a project
+type renderer struct {
+	ctx context.Context
+
+	// A config that is the "dot" value available to any template being rendered.
+	// Refer to https://pkg.go.dev/text/template for how templates can use
+	// this "dot" value
+	config map[string]any
+
+	// A base template with helper functions and user defined templates in the
+	// library directory loaded. This is cloned for each project template computation
+	// during file tree walk
+	baseTemplate *template.Template
+
+	// List of in-memory files generated from the template
+	files []file
+
+	// Glob patterns for files and directories to skip. There are three possible
+	// outcomes for skip:
+	//
+	// 1. File is not generated. This happens if one of the file's parent directories
+	// matches a glob pattern
+	//
+	// 2. File is generated but not persisted to disk. This happens if the file itself
+	// matches a glob pattern, but none of its parents match a glob pattern from the list
+	//
+	// 3. File is persisted to disk. This happens if the file and its parent directories
+	// do not match any glob patterns from this list
+	skipPatterns []string
+
+	// Filer rooted at template root. The file tree from this root is walked to
+	// generate the project
+	templateFiler filer.Filer
+
+	// Root directory for the project instantiated from the template
+	instanceRoot string
+}
+
+func newRenderer(ctx context.Context, config map[string]any, helpers template.FuncMap, templateRoot, libraryRoot, instanceRoot string) (*renderer, error) {
+	// Initialize new template, with helper functions loaded
+	tmpl := template.New("").Funcs(helpers)
+
+	// Load user defined associated templates from the library root
+	libraryGlob := filepath.Join(libraryRoot, "*")
+	matches, err := filepath.Glob(libraryGlob)
+	if err != nil {
+		return nil, err
+	}
+	if len(matches) != 0 {
+		tmpl, err = tmpl.ParseFiles(matches...)
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	templateFiler, err := filer.NewLocalClient(templateRoot)
+	if err != nil {
+		return nil, err
+	}
+
+	ctx = log.NewContext(ctx, log.GetLogger(ctx).With("action", "initialize-template"))
+
+	return &renderer{
+		ctx:           ctx,
+		config:        config,
+		baseTemplate:  tmpl,
+		files:         make([]file, 0),
+		skipPatterns:  make([]string, 0),
+		templateFiler: templateFiler,
+		instanceRoot:  instanceRoot,
+	}, nil
+}
+
+// Executes the template by applying config on it.
Returns the materialized template +// as a string +func (r *renderer) executeTemplate(templateDefinition string) (string, error) { + // Create copy of base template so as to not overwrite it + tmpl, err := r.baseTemplate.Clone() + if err != nil { + return "", err + } + + // Parse the template text + tmpl, err = tmpl.Parse(templateDefinition) + if err != nil { + return "", fmt.Errorf("error in %s: %w", templateDefinition, err) + } + + // Execute template and get result + result := strings.Builder{} + err = tmpl.Execute(&result, r.config) + if err != nil { + return "", err + } + return result.String(), nil +} + +func (r *renderer) computeFile(relPathTemplate string) (file, error) { + // read file permissions + info, err := r.templateFiler.Stat(r.ctx, relPathTemplate) + if err != nil { + return nil, err + } + perm := info.Mode().Perm() + + // Execute relative path template to get destination path for the file + relPath, err := r.executeTemplate(relPathTemplate) + if err != nil { + return nil, err + } + + // If file name does not specify the `.tmpl` extension, then it is copied + // over as is, without treating it as a template + if !strings.HasSuffix(relPathTemplate, templateExtension) { + return ©File{ + dstPath: &destinationPath{ + root: r.instanceRoot, + relPath: relPath, + }, + perm: perm, + ctx: r.ctx, + srcPath: relPathTemplate, + srcFiler: r.templateFiler, + }, nil + } else { + // Trim the .tmpl suffix from file name, if specified in the template + // path + relPath = strings.TrimSuffix(relPath, templateExtension) + } + + // read template file's content + templateReader, err := r.templateFiler.Read(r.ctx, relPathTemplate) + if err != nil { + return nil, err + } + defer templateReader.Close() + + // execute the contents of the file as a template + contentTemplate, err := io.ReadAll(templateReader) + if err != nil { + return nil, err + } + content, err := r.executeTemplate(string(contentTemplate)) + // Capture errors caused by the "fail" helper function + if target := (&ErrFail{}); errors.As(err, target) { + return nil, target + } + if err != nil { + return nil, fmt.Errorf("failed to compute file content for %s. %w", relPathTemplate, err) + } + + return &inMemoryFile{ + dstPath: &destinationPath{ + root: r.instanceRoot, + relPath: relPath, + }, + perm: perm, + content: []byte(content), + }, nil +} + +// This function walks the template file tree to generate an in memory representation +// of a project. +// +// During file tree walk, in the current directory, we would like to determine +// all possible {{skip}} function calls before we process any of the directories +// so that we can skip them eagerly if needed. That is in the current working directory +// we would like to process all files before we process any of the directories. +// +// This is not possible using the std library WalkDir which processes the files in +// lexical order which is why this function implements BFS. 
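+//
+// For example, if a file in the template root calls {{skip "scratch"}}, that pattern
+// is recorded while the root's files are being processed, before the "scratch"
+// directory is popped from the queue, so that directory is skipped without ever
+// being read.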
+func (r *renderer) walk() error {
+	directories := []string{"."}
+	var currentDirectory string
+
+	for len(directories) > 0 {
+		currentDirectory, directories = directories[0], directories[1:]
+
+		// Skip current directory if it matches any of the accumulated skip patterns
+		instanceDirectory, err := r.executeTemplate(currentDirectory)
+		if err != nil {
+			return err
+		}
+		match, err := isSkipped(instanceDirectory, r.skipPatterns)
+		if err != nil {
+			return err
+		}
+		if match {
+			logger.Infof(r.ctx, "skipping directory: %s", instanceDirectory)
+			continue
+		}
+
+		// Add skip function, which accumulates skip patterns relative to current
+		// directory
+		r.baseTemplate.Funcs(template.FuncMap{
+			"skip": func(relPattern string) (string, error) {
+				// patterns are specified relative to current directory of the file
+				// the {{skip}} function is called from.
+				patternRaw := path.Join(currentDirectory, relPattern)
+				pattern, err := r.executeTemplate(patternRaw)
+				if err != nil {
+					return "", err
+				}
+
+				if !slices.Contains(r.skipPatterns, pattern) {
+					logger.Infof(r.ctx, "adding skip pattern: %s", pattern)
+					r.skipPatterns = append(r.skipPatterns, pattern)
+				}
+				// Returning an empty string prints nothing at the function call site
+				// when the template is executed
+				return "", nil
+			},
+		})
+
+		// Process all entries in current directory
+		//
+		// 1. For files: the templates in the file name and content are executed, and
+		// an in-memory representation of the file is generated
+		//
+		// 2. For directories: they are appended to a slice, which acts as a queue
+		// allowing BFS traversal of the template file tree
+		entries, err := r.templateFiler.ReadDir(r.ctx, currentDirectory)
+		if err != nil {
+			return err
+		}
+		// Sort by name to ensure deterministic ordering
+		sort.Slice(entries, func(i, j int) bool {
+			return entries[i].Name() < entries[j].Name()
+		})
+		for _, entry := range entries {
+			if entry.IsDir() {
+				// Add to slice, for BFS traversal
+				directories = append(directories, path.Join(currentDirectory, entry.Name()))
+				continue
+			}
+
+			// Generate an in-memory representation of the file
+			f, err := r.computeFile(path.Join(currentDirectory, entry.Name()))
+			if err != nil {
+				return err
+			}
+			logger.Infof(r.ctx, "added file to list of possible project files: %s", f.DstPath().relPath)
+			r.files = append(r.files, f)
+		}
+
+	}
+	return nil
+}
+
+func (r *renderer) persistToDisk() error {
+	// Accumulate files which we will persist, skipping files whose path matches
+	// any of the skip patterns
+	filesToPersist := make([]file, 0)
+	for _, file := range r.files {
+		match, err := isSkipped(file.DstPath().relPath, r.skipPatterns)
+		if err != nil {
+			return err
+		}
+		if match {
+			log.Infof(r.ctx, "skipping file: %s", file.DstPath())
+			continue
+		}
+		filesToPersist = append(filesToPersist, file)
+	}
+
+	// Assert no conflicting files exist
+	for _, file := range filesToPersist {
+		path := file.DstPath().absPath()
+		_, err := os.Stat(path)
+		if err == nil {
+			return fmt.Errorf("failed to persist to disk, conflict with existing file: %s", path)
+		}
+		if err != nil && !os.IsNotExist(err) {
+			return fmt.Errorf("error while verifying file %s does not already exist: %w", path, err)
+		}
+	}
+
+	// Persist files to disk
+	for _, file := range filesToPersist {
+		err := file.PersistToDisk()
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func isSkipped(filePath string, patterns []string) (bool, error) {
+	for _, pattern := range patterns {
+		isMatch, err := path.Match(pattern, filePath)
+		if err != nil {
+			return false,
err + } + if isMatch { + return true, nil + } + } + return false, nil +} diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go new file mode 100644 index 000000000..21dd1e4fa --- /dev/null +++ b/libs/template/renderer_test.go @@ -0,0 +1,595 @@ +package template + +import ( + "context" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "runtime" + "strings" + "testing" + "text/template" + + "github.com/databricks/cli/bundle" + bundleConfig "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/databricks-sdk-go" + workspaceConfig "github.com/databricks/databricks-sdk-go/config" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func assertFileContent(t *testing.T, path string, content string) { + b, err := os.ReadFile(path) + require.NoError(t, err) + assert.Equal(t, content, string(b)) +} + +func assertFilePermissions(t *testing.T, path string, perm fs.FileMode) { + info, err := os.Stat(path) + require.NoError(t, err) + assert.Equal(t, perm, info.Mode().Perm()) +} + +func assertBuiltinTemplateValid(t *testing.T, settings map[string]any, target string, isServicePrincipal bool, build bool, tempDir string) { + ctx := context.Background() + + templatePath, err := prepareBuiltinTemplates("default-python", tempDir) + require.NoError(t, err) + + w := &databricks.WorkspaceClient{ + Config: &workspaceConfig.Config{Host: "https://myhost.com"}, + } + + // Prepare helpers + cachedUser = &iam.User{UserName: "user@domain.com"} + cachedIsServicePrincipal = &isServicePrincipal + ctx = root.SetWorkspaceClient(ctx, w) + helpers := loadHelpers(ctx) + + renderer, err := newRenderer(ctx, settings, helpers, templatePath, "./testdata/template-in-path/library", tempDir) + require.NoError(t, err) + + // Evaluate template + err = renderer.walk() + require.NoError(t, err) + err = renderer.persistToDisk() + require.NoError(t, err) + b, err := bundle.Load(ctx, filepath.Join(tempDir, "template", "my_project")) + require.NoError(t, err) + + // Apply initialize / validation mutators + b.Config.Workspace.CurrentUser = &bundleConfig.User{User: cachedUser} + b.WorkspaceClient() + b.Config.Bundle.Terraform = &bundleConfig.Terraform{ + ExecPath: "sh", + } + err = bundle.Apply(ctx, b, bundle.Seq( + bundle.Seq(mutator.DefaultMutators()...), + mutator.SelectTarget(target), + phases.Initialize(), + )) + require.NoError(t, err) + + // Apply build mutator + if build { + err = bundle.Apply(ctx, b, phases.Build()) + require.NoError(t, err) + } +} + +func TestBuiltinTemplateValid(t *testing.T) { + // Test option combinations + options := []string{"yes", "no"} + isServicePrincipal := false + build := false + for _, includeNotebook := range options { + for _, includeDlt := range options { + for _, includePython := range options { + for _, isServicePrincipal := range []bool{true, false} { + config := map[string]any{ + "project_name": "my_project", + "include_notebook": includeNotebook, + "include_dlt": includeDlt, + "include_python": includePython, + } + tempDir := t.TempDir() + assertBuiltinTemplateValid(t, config, "dev", isServicePrincipal, build, tempDir) + } + } + } + } + + // Test prod mode + build + config := map[string]any{ + "project_name": "my_project", + "include_notebook": "yes", + "include_dlt": "yes", + "include_python": "yes", + } + isServicePrincipal = false + build 
= true + + // On Windows, we can't always remove the resulting temp dir since background + // processes might have it open, so we use 'defer' for a best-effort cleanup + tempDir, err := os.MkdirTemp("", "templates") + require.NoError(t, err) + defer os.RemoveAll(tempDir) + + assertBuiltinTemplateValid(t, config, "prod", isServicePrincipal, build, tempDir) + defer os.RemoveAll(tempDir) +} + +func TestRendererWithAssociatedTemplateInLibrary(t *testing.T) { + tmpDir := t.TempDir() + + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/email/template", "./testdata/email/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + require.NoError(t, err) + + err = r.persistToDisk() + require.NoError(t, err) + + b, err := os.ReadFile(filepath.Join(tmpDir, "my_email")) + require.NoError(t, err) + assert.Equal(t, "shreyas.goenka@databricks.com", strings.Trim(string(b), "\n\r")) +} + +func TestRendererExecuteTemplate(t *testing.T) { + templateText := + `"{{.count}} items are made of {{.Material}}". +{{if eq .Animal "sheep" }} +Sheep wool is the best! +{{else}} +{{.Animal}} wool is not too bad... +{{end}} +My email is {{template "email"}} +` + + r := renderer{ + config: map[string]any{ + "Material": "wool", + "count": 1, + "Animal": "sheep", + }, + baseTemplate: template.Must(template.New("base").Parse(`{{define "email"}}shreyas.goenka@databricks.com{{end}}`)), + } + + statement, err := r.executeTemplate(templateText) + require.NoError(t, err) + assert.Contains(t, statement, `"1 items are made of wool"`) + assert.NotContains(t, statement, `cat wool is not too bad.."`) + assert.Contains(t, statement, "Sheep wool is the best!") + assert.Contains(t, statement, `My email is shreyas.goenka@databricks.com`) + + r = renderer{ + config: map[string]any{ + "Material": "wool", + "count": 1, + "Animal": "cat", + }, + baseTemplate: template.Must(template.New("base").Parse(`{{define "email"}}hrithik.roshan@databricks.com{{end}}`)), + } + + statement, err = r.executeTemplate(templateText) + require.NoError(t, err) + assert.Contains(t, statement, `"1 items are made of wool"`) + assert.Contains(t, statement, `cat wool is not too bad...`) + assert.NotContains(t, statement, "Sheep wool is the best!") + assert.Contains(t, statement, `My email is hrithik.roshan@databricks.com`) +} + +func TestRendererIsSkipped(t *testing.T) { + + skipPatterns := []string{"a*", "*yz", "def", "a/b/*"} + + // skipped paths + match, err := isSkipped("abc", skipPatterns) + require.NoError(t, err) + assert.True(t, match) + + match, err = isSkipped("abcd", skipPatterns) + require.NoError(t, err) + assert.True(t, match) + + match, err = isSkipped("a", skipPatterns) + require.NoError(t, err) + assert.True(t, match) + + match, err = isSkipped("xxyz", skipPatterns) + require.NoError(t, err) + assert.True(t, match) + + match, err = isSkipped("yz", skipPatterns) + require.NoError(t, err) + assert.True(t, match) + + match, err = isSkipped("a/b/c", skipPatterns) + require.NoError(t, err) + assert.True(t, match) + + // NOT skipped paths + match, err = isSkipped(".", skipPatterns) + require.NoError(t, err) + assert.False(t, match) + + match, err = isSkipped("y", skipPatterns) + require.NoError(t, err) + assert.False(t, match) + + match, err = isSkipped("z", skipPatterns) + require.NoError(t, err) + assert.False(t, match) + + match, err = isSkipped("defg", skipPatterns) + require.NoError(t, err) + assert.False(t, match) + + match, err = 
isSkipped("cat", skipPatterns) + require.NoError(t, err) + assert.False(t, match) + + match, err = isSkipped("a/b/c/d", skipPatterns) + require.NoError(t, err) + assert.False(t, match) +} + +func TestRendererPersistToDisk(t *testing.T) { + tmpDir := t.TempDir() + ctx := context.Background() + + r := &renderer{ + ctx: ctx, + instanceRoot: tmpDir, + skipPatterns: []string{"a/b/c", "mn*"}, + files: []file{ + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a/b/c", + }, + perm: 0444, + content: nil, + }, + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "mno", + }, + perm: 0444, + content: nil, + }, + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a/b/d", + }, + perm: 0444, + content: []byte("123"), + }, + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "mmnn", + }, + perm: 0444, + content: []byte("456"), + }, + }, + } + + err := r.persistToDisk() + require.NoError(t, err) + + assert.NoFileExists(t, filepath.Join(tmpDir, "a", "b", "c")) + assert.NoFileExists(t, filepath.Join(tmpDir, "mno")) + + assertFileContent(t, filepath.Join(tmpDir, "a", "b", "d"), "123") + assertFilePermissions(t, filepath.Join(tmpDir, "a", "b", "d"), 0444) + assertFileContent(t, filepath.Join(tmpDir, "mmnn"), "456") + assertFilePermissions(t, filepath.Join(tmpDir, "mmnn"), 0444) +} + +func TestRendererWalk(t *testing.T) { + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + tmpDir := t.TempDir() + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/walk/template", "./testdata/walk/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + getContent := func(r *renderer, path string) string { + for _, f := range r.files { + if f.DstPath().relPath != path { + continue + } + switch v := f.(type) { + case *inMemoryFile: + return strings.Trim(string(v.content), "\r\n") + case *copyFile: + r, err := r.templateFiler.Read(context.Background(), v.srcPath) + require.NoError(t, err) + b, err := io.ReadAll(r) + require.NoError(t, err) + return strings.Trim(string(b), "\r\n") + default: + require.FailNow(t, "execution should not reach here") + } + } + require.FailNow(t, "file is absent: "+path) + return "" + } + + assert.Len(t, r.files, 4) + assert.Equal(t, "file one", getContent(r, "file1")) + assert.Equal(t, "file two", getContent(r, "file2")) + assert.Equal(t, "file three", getContent(r, "dir1/dir3/file3")) + assert.Equal(t, "file four", getContent(r, "dir2/file4")) +} + +func TestRendererFailFunction(t *testing.T) { + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + tmpDir := t.TempDir() + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/fail/template", "./testdata/fail/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.Equal(t, "I am an error message", err.Error()) +} + +func TestRendererSkipsDirsEagerly(t *testing.T) { + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + tmpDir := t.TempDir() + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/skip-dir-eagerly/template", "./testdata/skip-dir-eagerly/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + content := string(r.files[0].(*inMemoryFile).content) + assert.Equal(t, "I should be the only file created", strings.Trim(content, "\r\n")) +} + +func TestRendererSkipAllFilesInCurrentDirectory(t 
*testing.T) { + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + tmpDir := t.TempDir() + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/skip-all-files-in-cwd/template", "./testdata/skip-all-files-in-cwd/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + // All 3 files are executed and have in memory representations + require.Len(t, r.files, 3) + + err = r.persistToDisk() + require.NoError(t, err) + + entries, err := os.ReadDir(tmpDir) + require.NoError(t, err) + // Assert none of the files are persisted to disk, because of {{skip "*"}} + assert.Len(t, entries, 0) +} + +func TestRendererSkipPatternsAreRelativeToFileDirectory(t *testing.T) { + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + tmpDir := t.TempDir() + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/skip-is-relative/template", "./testdata/skip-is-relative/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.skipPatterns, 3) + assert.Contains(t, r.skipPatterns, "a") + assert.Contains(t, r.skipPatterns, "dir1/b") + assert.Contains(t, r.skipPatterns, "dir1/dir2/c") +} + +func TestRendererSkip(t *testing.T) { + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + tmpDir := t.TempDir() + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/skip/template", "./testdata/skip/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + // All 6 files are computed, even though "dir2/*" is present as a skip pattern + // This is because "dir2/*" matches the files in dir2, but not dir2 itself + assert.Len(t, r.files, 6) + + err = r.persistToDisk() + require.NoError(t, err) + + assert.FileExists(t, filepath.Join(tmpDir, "file1")) + assert.FileExists(t, filepath.Join(tmpDir, "file2")) + assert.FileExists(t, filepath.Join(tmpDir, "dir1/file5")) + + // These files have been skipped + assert.NoFileExists(t, filepath.Join(tmpDir, "file3")) + assert.NoFileExists(t, filepath.Join(tmpDir, "dir1/file4")) + assert.NoDirExists(t, filepath.Join(tmpDir, "dir2")) + assert.NoFileExists(t, filepath.Join(tmpDir, "dir2/file6")) +} + +func TestRendererReadsPermissionsBits(t *testing.T) { + if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { + t.SkipNow() + } + tmpDir := t.TempDir() + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/executable-bit-read/template", "./testdata/executable-bit-read/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + getPermissions := func(r *renderer, path string) fs.FileMode { + for _, f := range r.files { + if f.DstPath().relPath != path { + continue + } + switch v := f.(type) { + case *inMemoryFile: + return v.perm + case *copyFile: + return v.perm + default: + require.FailNow(t, "execution should not reach here") + } + } + require.FailNow(t, "file is absent: "+path) + return 0 + } + + assert.Len(t, r.files, 2) + assert.Equal(t, getPermissions(r, "script.sh"), fs.FileMode(0755)) + assert.Equal(t, getPermissions(r, "not-a-script"), fs.FileMode(0644)) +} + +func TestRendererErrorOnConflictingFile(t *testing.T) { + tmpDir := t.TempDir() + + f, err := os.Create(filepath.Join(tmpDir, "a")) + require.NoError(t, err) + err = f.Close() + require.NoError(t, err) + + r := renderer{ + skipPatterns: 
[]string{}, + files: []file{ + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a", + }, + perm: 0444, + content: []byte("123"), + }, + }, + } + err = r.persistToDisk() + assert.EqualError(t, err, fmt.Sprintf("failed to persist to disk, conflict with existing file: %s", filepath.Join(tmpDir, "a"))) +} + +func TestRendererNoErrorOnConflictingFileIfSkipped(t *testing.T) { + tmpDir := t.TempDir() + ctx := context.Background() + + f, err := os.Create(filepath.Join(tmpDir, "a")) + require.NoError(t, err) + err = f.Close() + require.NoError(t, err) + + r := renderer{ + ctx: ctx, + skipPatterns: []string{"a"}, + files: []file{ + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a", + }, + perm: 0444, + content: []byte("123"), + }, + }, + } + err = r.persistToDisk() + // No error is returned even though a conflicting file exists. This is because + // the generated file is being skipped + assert.NoError(t, err) + assert.Len(t, r.files, 1) +} + +func TestRendererNonTemplatesAreCreatedAsCopyFiles(t *testing.T) { + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + tmpDir := t.TempDir() + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/copy-file-walk/template", "./testdata/copy-file-walk/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + assert.Equal(t, r.files[0].(*copyFile).srcPath, "not-a-template") + assert.Equal(t, r.files[0].DstPath().absPath(), filepath.Join(tmpDir, "not-a-template")) +} + +func TestRendererFileTreeRendering(t *testing.T) { + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + tmpDir := t.TempDir() + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, map[string]any{ + "dir_name": "my_directory", + "file_name": "my_file", + }, helpers, "./testdata/file-tree-rendering/template", "./testdata/file-tree-rendering/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + // Assert in memory representation is created. + assert.Len(t, r.files, 1) + assert.Equal(t, r.files[0].DstPath().absPath(), filepath.Join(tmpDir, "my_directory", "my_file")) + + err = r.persistToDisk() + require.NoError(t, err) + + // Assert files and directories are correctly materialized. 
+	assert.DirExists(t, filepath.Join(tmpDir, "my_directory"))
+	assert.FileExists(t, filepath.Join(tmpDir, "my_directory", "my_file"))
+}
+
+func TestRendererSubTemplateInPath(t *testing.T) {
+	ctx := context.Background()
+	ctx = root.SetWorkspaceClient(ctx, nil)
+	tmpDir := t.TempDir()
+
+	helpers := loadHelpers(ctx)
+	r, err := newRenderer(ctx, nil, helpers, "./testdata/template-in-path/template", "./testdata/template-in-path/library", tmpDir)
+	require.NoError(t, err)
+
+	err = r.walk()
+	require.NoError(t, err)
+
+	assert.Equal(t, filepath.Join(tmpDir, "my_directory", "my_file"), r.files[0].DstPath().absPath())
+	assert.Equal(t, "my_directory/my_file", r.files[0].DstPath().relPath)
+}
diff --git a/libs/template/templates/default-python/databricks_template_schema.json b/libs/template/templates/default-python/databricks_template_schema.json
new file mode 100644
index 000000000..db8adcce1
--- /dev/null
+++ b/libs/template/templates/default-python/databricks_template_schema.json
@@ -0,0 +1,31 @@
+{
+    "properties": {
+        "project_name": {
+            "type": "string",
+            "default": "my_project",
+            "description": "Unique name for this project",
+            "order": 1
+        },
+        "include_notebook": {
+            "type": "string",
+            "default": "yes",
+            "enum": ["yes", "no"],
+            "description": "Include a stub (sample) notebook in 'my_project/src'",
+            "order": 2
+        },
+        "include_dlt": {
+            "type": "string",
+            "default": "yes",
+            "enum": ["yes", "no"],
+            "description": "Include a stub (sample) DLT pipeline in 'my_project/src'",
+            "order": 3
+        },
+        "include_python": {
+            "type": "string",
+            "default": "yes",
+            "enum": ["yes", "no"],
+            "description": "Include a stub (sample) Python package in 'my_project/src'",
+            "order": 4
+        }
+    }
+}
diff --git a/libs/template/templates/default-python/template/__preamble.tmpl b/libs/template/templates/default-python/template/__preamble.tmpl
new file mode 100644
index 000000000..a86d3bffd
--- /dev/null
+++ b/libs/template/templates/default-python/template/__preamble.tmpl
@@ -0,0 +1,38 @@
+# Preamble
+
+This file contains only template directives; it is skipped for the actual output.
+
+{{skip "__preamble"}}
+
+{{ $value := .project_name }}
+{{with (regexp "^[A-Za-z0-9_]*$")}}
+  {{if not (.MatchString $value)}}
+    {{fail "Invalid project_name: %s. Must consist of letters, numbers, and underscores only."
$value}} + {{end}} +{{end}} + +{{$notDLT := not (eq .include_dlt "yes")}} +{{$notNotebook := not (eq .include_notebook "yes")}} +{{$notPython := not (eq .include_python "yes")}} + +{{if $notPython}} + {{skip "{{.project_name}}/src/{{.project_name}}"}} + {{skip "{{.project_name}}/tests/main_test.py"}} + {{skip "{{.project_name}}/setup.py"}} + {{skip "{{.project_name}}/pytest.ini"}} +{{end}} + +{{if $notDLT}} + {{skip "{{.project_name}}/src/dlt_pipeline.ipynb"}} + {{skip "{{.project_name}}/resources/{{.project_name}}_pipeline.yml"}} +{{end}} + +{{if $notNotebook}} + {{skip "{{.project_name}}/src/notebook.ipynb"}} +{{end}} + +{{if (and $notDLT $notNotebook $notPython)}} + {{skip "{{.project_name}}/resources/{{.project_name}}_job.yml"}} +{{else}} + {{skip "{{.project_name}}/resources/.gitkeep"}} +{{end}} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/.gitignore b/libs/template/templates/default-python/template/{{.project_name}}/.gitignore new file mode 100644 index 000000000..aa87f0198 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/.gitignore @@ -0,0 +1,9 @@ + +.databricks/ +build/ +dist/ +__pycache__/ +*.egg-info +.venv/ +scratch/** +!scratch/README.md diff --git a/libs/template/templates/default-python/template/{{.project_name}}/.vscode/__builtins__.pyi b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/__builtins__.pyi new file mode 100644 index 000000000..0edd5181b --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/__builtins__.pyi @@ -0,0 +1,3 @@ +# Typings for Pylance in Visual Studio Code +# see https://github.com/microsoft/pyright/blob/main/docs/builtins.md +from databricks.sdk.runtime import * diff --git a/libs/template/templates/default-python/template/{{.project_name}}/.vscode/extensions.json b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/extensions.json new file mode 100644 index 000000000..5d15eba36 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/extensions.json @@ -0,0 +1,7 @@ +{ + "recommendations": [ + "databricks.databricks", + "ms-python.vscode-pylance", + "redhat.vscode-yaml" + ] +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json new file mode 100644 index 000000000..16cb2c96a --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json @@ -0,0 +1,14 @@ +{ + "python.analysis.stubPath": ".vscode", + "databricks.python.envFile": "${workspaceFolder}/.env", + "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])", + "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------", + "python.testing.pytestArgs": [ + "." 
+    ],
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true,
+    "files.exclude": {
+        "**/*.egg-info": true
+    },
+}
diff --git a/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl
new file mode 100644
index 000000000..1bcd7af41
--- /dev/null
+++ b/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl
@@ -0,0 +1,44 @@
+# {{.project_name}}
+
+The '{{.project_name}}' project was generated by using the default-python template.
+
+## Getting started
+
+1. Install the Databricks CLI from https://docs.databricks.com/dev-tools/cli/databricks-cli.html
+
+2. Authenticate to your Databricks workspace:
+    ```
+    $ databricks configure
+    ```
+
+3. To deploy a development copy of this project, type:
+    ```
+    $ databricks bundle deploy --target dev
+    ```
+    (Note that "dev" is the default target, so the `--target` parameter
+    is optional here.)
+
+    This deploys everything that's defined for this project.
+    For example, the default template would deploy a job called
+    `[dev yourname] {{.project_name}}_job` to your workspace.
+    You can find that job by opening your workspace and clicking on **Workflows**.
+
+4. Similarly, to deploy a production copy, type:
+    ```
+    $ databricks bundle deploy --target prod
+    ```
+
+5. To run a job or pipeline, use the "run" command:
+    ```
+    $ databricks bundle run {{.project_name}}_job
+    ```
+
+6. Optionally, install developer tools such as the Databricks extension for Visual Studio Code from
+   https://docs.databricks.com/dev-tools/vscode-ext.html.
+{{- if (eq .include_python "yes") }} Or read the "getting started" documentation for
+   **Databricks Connect** for instructions on running the included Python code from a different IDE.
+{{- end}}
+
+7. For documentation on the Databricks asset bundles format used
+   for this project, and for CI/CD configuration, see
+   https://docs.databricks.com/dev-tools/bundles/index.html.
diff --git a/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl
new file mode 100644
index 000000000..7fbf4da4c
--- /dev/null
+++ b/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl
@@ -0,0 +1,52 @@
+# This is a Databricks asset bundle definition for {{.project_name}}.
+# See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
+bundle:
+  name: {{.project_name}}
+
+include:
+  - resources/*.yml
+
+targets:
+  # The 'dev' target, used for development purposes.
+  # Whenever a developer deploys using 'dev', they get their own copy.
+  dev:
+    # We use 'mode: development' to make sure everything deployed to this target gets a prefix
+    # like '[dev my_user_name]'. Setting this mode also disables any schedules and
+    # automatic triggers for jobs and enables the 'development' mode for Delta Live Tables pipelines.
+    mode: development
+    default: true
+    workspace:
+      host: {{workspace_host}}
+
+  # Optionally, there could be a 'staging' target here.
+  # (See Databricks docs on CI/CD at https://docs.databricks.com/dev-tools/bundles/index.html.)
+  #
+  # staging:
+  #  workspace:
+  #    host: {{workspace_host}}
+
+  # The 'prod' target, used for production deployment.
+ prod: + # For production deployments, we only have a single copy, so we override the + # workspace.root_path default of + # /Users/${workspace.current_user.userName}/.bundle/${bundle.target}/${bundle.name} + # to a path that is not specific to the current user. + {{- /* + Explaining 'mode: production' isn't as pressing as explaining 'mode: development'. + As we already talked about the other mode above, users can just + look at documentation or ask the assistant about 'mode: production'. + # + # By making use of 'mode: production' we enable strict checks + # to make sure we have correctly configured this target. + */}} + mode: production + workspace: + host: {{workspace_host}} + root_path: /Shared/.bundle/prod/${bundle.name} + {{- if not is_service_principal}} + run_as: + # This runs as {{user_name}} in production. Alternatively, + # a service principal could be used here using service_principal_name + # (see Databricks documentation). + user_name: {{user_name}} + {{end -}} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl new file mode 100644 index 000000000..ee9570302 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl @@ -0,0 +1,27 @@ +# Fixtures +{{- /* +We don't want to have too many README.md files, since they +stand out so much. But we do need to have a file here to make +sure the folder is added to Git. +*/}} + +This folder is reserved for fixtures, such as CSV files. + +Below is an example of how to load fixtures as a data frame: + +``` +import pandas as pd +import os + +def get_absolute_path(*relative_parts): + if 'dbutils' in globals(): + base_dir = os.path.dirname(dbutils.notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get()) # type: ignore + path = os.path.normpath(os.path.join(base_dir, *relative_parts)) + return path if path.startswith("/Workspace") else "/Workspace" + path + else: + return os.path.join(*relative_parts) + +csv_file = get_absolute_path("..", "fixtures", "mycsv.csv") +df = pd.read_csv(csv_file) +display(df) +``` diff --git a/libs/template/templates/default-python/template/{{.project_name}}/pytest.ini b/libs/template/templates/default-python/template/{{.project_name}}/pytest.ini new file mode 100644 index 000000000..80432c220 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +testpaths = tests +pythonpath = src diff --git a/libs/template/templates/default-python/template/{{.project_name}}/resources/.gitkeep b/libs/template/templates/default-python/template/{{.project_name}}/resources/.gitkeep new file mode 100644 index 000000000..3e09c14c1 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/resources/.gitkeep @@ -0,0 +1 @@ +This folder is reserved for Databricks Asset Bundles resource definitions. 
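For illustration, a unit test under `tests/` could read one of these fixtures directly: the `pytest.ini` added above sets `testpaths = tests` and `pythonpath = src`, so tests run from the project root and can import the project package. A minimal sketch, assuming pandas is available and a hypothetical `fixtures/mycsv.csv` exists (neither the test file nor the fixture is part of the template):

```python
# tests/fixtures_example_test.py -- hypothetical test, not included in the template
import os

import pandas as pd


def test_load_csv_fixture():
    # Resolve the fixture relative to this test file, mirroring the non-notebook
    # branch of the helper shown in fixtures/.gitkeep.tmpl.
    csv_file = os.path.join(os.path.dirname(__file__), "..", "fixtures", "mycsv.csv")
    df = pd.read_csv(csv_file)
    assert not df.empty
```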
diff --git a/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl new file mode 100644 index 000000000..1792f9479 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl @@ -0,0 +1,64 @@ +# The main job for {{.project_name}} +resources: + jobs: + {{.project_name}}_job: + name: {{.project_name}}_job + + schedule: + quartz_cron_expression: '44 37 8 * * ?' + timezone_id: Europe/Amsterdam + + {{- if not is_service_principal}} + + email_notifications: + on_failure: + - {{user_name}} + + {{else}} + + {{end -}} + + tasks: + {{- if eq .include_notebook "yes" }} + - task_key: notebook_task + job_cluster_key: job_cluster + notebook_task: + notebook_path: ../src/notebook.ipynb + {{end -}} + {{- if (eq .include_dlt "yes") }} + - task_key: refresh_pipeline + {{- if (eq .include_notebook "yes" )}} + depends_on: + - task_key: notebook_task + {{- end}} + pipeline_task: + {{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}_pipeline.yml */}} + pipeline_id: ${resources.pipelines.{{.project_name}}_pipeline.id} + {{end -}} + {{- if (eq .include_python "yes") }} + - task_key: main_task + {{- if (eq .include_dlt "yes") }} + depends_on: + - task_key: refresh_pipeline + {{- else if (eq .include_notebook "yes" )}} + depends_on: + - task_key: notebook_task + {{end}} + job_cluster_key: job_cluster + python_wheel_task: + package_name: {{.project_name}} + entry_point: main + libraries: + - whl: ../dist/*.whl + + {{else}} + {{end -}} + job_clusters: + - job_cluster_key: job_cluster + new_cluster: + {{- /* we should always use an LTS version in our templates */}} + spark_version: 13.3.x-scala2.12 + node_type_id: {{smallest_node_type}} + autoscale: + min_workers: 1 + max_workers: 4 diff --git a/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl new file mode 100644 index 000000000..4b8f74d17 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl @@ -0,0 +1,12 @@ +# The main pipeline for {{.project_name}} +resources: + pipelines: + {{.project_name}}_pipeline: + name: {{.project_name}}_pipeline + target: {{.project_name}}_${bundle.environment} + libraries: + - notebook: + path: ../src/dlt_pipeline.ipynb + + configuration: + bundle.sourcePath: /Workspace/${workspace.file_path}/src diff --git a/libs/template/templates/default-python/template/{{.project_name}}/scratch/README.md b/libs/template/templates/default-python/template/{{.project_name}}/scratch/README.md new file mode 100644 index 000000000..e6cfb81b4 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/scratch/README.md @@ -0,0 +1,4 @@ +# scratch + +This folder is reserved for personal, exploratory notebooks. +By default these are not committed to Git, as 'scratch' is listed in .gitignore. 
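The `python_wheel_task` above points its `libraries` entry at `../dist/*.whl`, and the `setup.py.tmpl` further down in this diff documents `python setup.py sdist bdist_wheel` as the build command. A minimal sketch of producing that artifact locally from the project root, assuming `setuptools` and `wheel` are installed (the helper script name is hypothetical):

```python
# build_wheel.py -- hypothetical helper; equivalent to running `python setup.py bdist_wheel`
import subprocess
import sys
from pathlib import Path

# Build a wheel into ./dist using the project's setup.py.
subprocess.run([sys.executable, "setup.py", "bdist_wheel"], check=True)

# List the artifact matched by `whl: ../dist/*.whl` in the job definition.
print(sorted(Path("dist").glob("*.whl")))
```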
diff --git a/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb.tmpl new file mode 100644 index 000000000..04bb261cd --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb.tmpl @@ -0,0 +1,54 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": { + "byteLimit": 2048000, + "rowLimit": 10000 + }, + "inputWidgets": {}, + "nuid": "6bca260b-13d1-448f-8082-30b60a85c9ae", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + {{- if (eq .include_python "yes") }} + "import sys\n", + "sys.path.append('../src')\n", + "from {{.project_name}} import main\n", + "\n", + "main.get_taxis().show(10)" + {{else}} + "spark.range(10)" + {{end -}} + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "ipynb-notebook", + "widgets": {} + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl new file mode 100644 index 000000000..efd598820 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl @@ -0,0 +1,24 @@ +""" +Setup script for {{.project_name}}. + +This script packages and distributes the associated wheel file(s). +Source code is in ./src/. Run 'python setup.py sdist bdist_wheel' to build. +""" +from setuptools import setup, find_packages + +import sys +sys.path.append('./src') + +import {{.project_name}} + +setup( + name="{{.project_name}}", + version={{.project_name}}.__version__, + url="https://databricks.com", + author="{{user_name}}", + description="my test wheel", + packages=find_packages(where='./src'), + package_dir={'': 'src'}, + entry_points={"entry_points": "main={{.project_name}}.main:main"}, + install_requires=["setuptools"], +) diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl new file mode 100644 index 000000000..4f50294f6 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl @@ -0,0 +1,102 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": {}, + "inputWidgets": {}, + "nuid": "9a626959-61c8-4bba-84d2-2a4ecab1f7ec", + "showTitle": false, + "title": "" + } + }, + "source": [ + "# DLT pipeline\n", + "\n", + "This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}_pipeline.yml." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": {}, + "inputWidgets": {}, + "nuid": "9198e987-5606-403d-9f6d-8f14e6a4017f", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + {{- if (eq .include_python "yes") }} + "# Import DLT and src/{{.project_name}}\n", + "import dlt\n", + "import sys\n", + "sys.path.append(spark.conf.get(\"bundle.sourcePath\", \".\"))\n", + "from pyspark.sql.functions import expr\n", + "from {{.project_name}} import main" + {{else}} + "import dlt\n", + "from pyspark.sql.functions import expr\n", + "from pyspark.sql import SparkSession\n", + "spark = SparkSession.builder.getOrCreate()" + {{end -}} + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": {}, + "inputWidgets": {}, + "nuid": "3fc19dba-61fd-4a89-8f8c-24fee63bfb14", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + {{- if (eq .include_python "yes") }} + "@dlt.view\n", + "def taxi_raw():\n", + " return main.get_taxis()\n", + {{else}} + "\n", + "@dlt.view\n", + "def taxi_raw():\n", + " return spark.read.format(\"json\").load(\"/databricks-datasets/nyctaxi/sample/json/\")\n", + {{end -}} + "\n", + "@dlt.table\n", + "def filtered_taxis():\n", + " return dlt.read(\"taxi_raw\").filter(expr(\"fare_amount < 30\"))" + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "dlt_pipeline", + "widgets": {} + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl new file mode 100644 index 000000000..0ab61db2c --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl @@ -0,0 +1,69 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": {}, + "inputWidgets": {}, + "nuid": "ee353e42-ff58-4955-9608-12865bd0950e", + "showTitle": false, + "title": "" + } + }, + "source": [ + "# Default notebook\n", + "\n", + "This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}_job.yml." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": { + "byteLimit": 2048000, + "rowLimit": 10000 + }, + "inputWidgets": {}, + "nuid": "6bca260b-13d1-448f-8082-30b60a85c9ae", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + {{- if (eq .include_python "yes") }} + "from {{.project_name}} import main\n", + "\n", + "main.get_taxis().show(10)" + {{else}} + "spark.range(10)" + {{end -}} + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "notebook", + "widgets": {} + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/__init__.py b/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/__init__.py new file mode 100644 index 000000000..f102a9cad --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0.1" diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/main.py.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/main.py.tmpl new file mode 100644 index 000000000..4fe5ac8f4 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/main.py.tmpl @@ -0,0 +1,16 @@ +{{- /* +We use pyspark.sql rather than DatabricksSession.builder.getOrCreate() +for compatibility with older runtimes. With a new runtime, it's +equivalent to DatabricksSession.builder.getOrCreate(). 
+*/ -}} +from pyspark.sql import SparkSession + +def get_taxis(): + spark = SparkSession.builder.getOrCreate() + return spark.read.table("samples.nyctaxi.trips") + +def main(): + get_taxis().show(5) + +if __name__ == '__main__': + main() diff --git a/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl new file mode 100644 index 000000000..f1750046a --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl @@ -0,0 +1,5 @@ +from {{.project_name}} import main + +def test_main(): + taxis = main.get_taxis() + assert taxis.count() > 5 diff --git a/libs/template/testdata/config-assign-from-file-invalid-int/config.json b/libs/template/testdata/config-assign-from-file-invalid-int/config.json new file mode 100644 index 000000000..a97bf0c2e --- /dev/null +++ b/libs/template/testdata/config-assign-from-file-invalid-int/config.json @@ -0,0 +1,6 @@ +{ + "int_val": "abc", + "float_val": 2, + "bool_val": true, + "string_val": "hello" +} diff --git a/libs/template/testdata/config-assign-from-file-unknown-property/config.json b/libs/template/testdata/config-assign-from-file-unknown-property/config.json new file mode 100644 index 000000000..518eaa6a2 --- /dev/null +++ b/libs/template/testdata/config-assign-from-file-unknown-property/config.json @@ -0,0 +1,3 @@ +{ + "unknown_prop": 123 +} diff --git a/libs/template/testdata/config-assign-from-file/config.json b/libs/template/testdata/config-assign-from-file/config.json new file mode 100644 index 000000000..564001e57 --- /dev/null +++ b/libs/template/testdata/config-assign-from-file/config.json @@ -0,0 +1,6 @@ +{ + "int_val": 1, + "float_val": 2, + "bool_val": true, + "string_val": "hello" +} diff --git a/libs/template/testdata/config-test-schema/test-schema.json b/libs/template/testdata/config-test-schema/test-schema.json new file mode 100644 index 000000000..41eb82519 --- /dev/null +++ b/libs/template/testdata/config-test-schema/test-schema.json @@ -0,0 +1,18 @@ +{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string", + "default": "abc" + } + } +} diff --git a/libs/template/testdata/copy-file-walk/template/not-a-template b/libs/template/testdata/copy-file-walk/template/not-a-template new file mode 100644 index 000000000..8baef1b4a --- /dev/null +++ b/libs/template/testdata/copy-file-walk/template/not-a-template @@ -0,0 +1 @@ +abc diff --git a/libs/template/testdata/email/library/email.tmpl b/libs/template/testdata/email/library/email.tmpl new file mode 100644 index 000000000..1897d46b3 --- /dev/null +++ b/libs/template/testdata/email/library/email.tmpl @@ -0,0 +1 @@ +{{define "email"}}shreyas.goenka@databricks.com{{end}} diff --git a/libs/template/testdata/email/template/my_email.tmpl b/libs/template/testdata/email/template/my_email.tmpl new file mode 100644 index 000000000..0b74ef47c --- /dev/null +++ b/libs/template/testdata/email/template/my_email.tmpl @@ -0,0 +1 @@ +{{template "email"}} diff --git a/libs/template/testdata/executable-bit-read/template/not-a-script.tmpl b/libs/template/testdata/executable-bit-read/template/not-a-script.tmpl new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/executable-bit-read/template/script.sh.tmpl b/libs/template/testdata/executable-bit-read/template/script.sh.tmpl new 
file mode 100755 index 000000000..09990d446 --- /dev/null +++ b/libs/template/testdata/executable-bit-read/template/script.sh.tmpl @@ -0,0 +1 @@ +echo "hello" diff --git a/libs/template/testdata/fail/template/hello.tmpl b/libs/template/testdata/fail/template/hello.tmpl new file mode 100644 index 000000000..d9426f8b2 --- /dev/null +++ b/libs/template/testdata/fail/template/hello.tmpl @@ -0,0 +1 @@ +{{fail "I am an error message"}} diff --git a/libs/template/testdata/file-tree-rendering/template/{{.dir_name}}/{{.file_name}}.tmpl b/libs/template/testdata/file-tree-rendering/template/{{.dir_name}}/{{.file_name}}.tmpl new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/map-pair/library/abc.tmpl b/libs/template/testdata/map-pair/library/abc.tmpl new file mode 100644 index 000000000..387c7555a --- /dev/null +++ b/libs/template/testdata/map-pair/library/abc.tmpl @@ -0,0 +1,3 @@ +{{- define "my_template" -}} +{{- .foo}} {{.bar}} {{.abc}} {{.def -}} +{{- end -}} diff --git a/libs/template/testdata/map-pair/template/hello.tmpl b/libs/template/testdata/map-pair/template/hello.tmpl new file mode 100644 index 000000000..d0077846e --- /dev/null +++ b/libs/template/testdata/map-pair/template/hello.tmpl @@ -0,0 +1 @@ +{{template "my_template" (map (pair "foo" false) (pair "bar" 123) (pair "abc" "hello") (pair "def" 12.3)) -}} diff --git a/libs/template/testdata/print-without-processing/template/hello.tmpl b/libs/template/testdata/print-without-processing/template/hello.tmpl new file mode 100644 index 000000000..735d02099 --- /dev/null +++ b/libs/template/testdata/print-without-processing/template/hello.tmpl @@ -0,0 +1 @@ +{{`{{ fail "abc" }}`}} diff --git a/libs/template/testdata/regexp-compile/template/hello.tmpl b/libs/template/testdata/regexp-compile/template/hello.tmpl new file mode 100644 index 000000000..5ea55d795 --- /dev/null +++ b/libs/template/testdata/regexp-compile/template/hello.tmpl @@ -0,0 +1,5 @@ +{{with (regexp "foo.?")}} +{{range $index, $element := (.FindAllString "seafood fool" -1) }} +{{print $index ":" $element}} +{{end}} +{{end}} diff --git a/libs/template/testdata/skip-all-files-in-cwd/template/file1 b/libs/template/testdata/skip-all-files-in-cwd/template/file1 new file mode 100644 index 000000000..789819226 --- /dev/null +++ b/libs/template/testdata/skip-all-files-in-cwd/template/file1 @@ -0,0 +1 @@ +a diff --git a/libs/template/testdata/skip-all-files-in-cwd/template/file2 b/libs/template/testdata/skip-all-files-in-cwd/template/file2 new file mode 100644 index 000000000..617807982 --- /dev/null +++ b/libs/template/testdata/skip-all-files-in-cwd/template/file2 @@ -0,0 +1 @@ +b diff --git a/libs/template/testdata/skip-all-files-in-cwd/template/file3.tmpl b/libs/template/testdata/skip-all-files-in-cwd/template/file3.tmpl new file mode 100644 index 000000000..9411049f9 --- /dev/null +++ b/libs/template/testdata/skip-all-files-in-cwd/template/file3.tmpl @@ -0,0 +1,3 @@ +c + +{{skip "*"}} diff --git a/libs/template/testdata/skip-dir-eagerly/template/dir1/file1.tmpl b/libs/template/testdata/skip-dir-eagerly/template/dir1/file1.tmpl new file mode 100644 index 000000000..bbf6881bf --- /dev/null +++ b/libs/template/testdata/skip-dir-eagerly/template/dir1/file1.tmpl @@ -0,0 +1 @@ +{{fail "This template should never be executed"}} diff --git a/libs/template/testdata/skip-dir-eagerly/template/file2.tmpl b/libs/template/testdata/skip-dir-eagerly/template/file2.tmpl new file mode 100644 index 000000000..afdf908cb --- /dev/null +++ 
b/libs/template/testdata/skip-dir-eagerly/template/file2.tmpl @@ -0,0 +1,3 @@ +I should be the only file created + +{{skip "dir1"}} diff --git a/libs/template/testdata/skip-is-relative/template/dir1/dir2/file3.tmpl b/libs/template/testdata/skip-is-relative/template/dir1/dir2/file3.tmpl new file mode 100644 index 000000000..0f24f26d5 --- /dev/null +++ b/libs/template/testdata/skip-is-relative/template/dir1/dir2/file3.tmpl @@ -0,0 +1 @@ +{{skip "c"}} diff --git a/libs/template/testdata/skip-is-relative/template/dir1/file2.tmpl b/libs/template/testdata/skip-is-relative/template/dir1/file2.tmpl new file mode 100644 index 000000000..53474b01e --- /dev/null +++ b/libs/template/testdata/skip-is-relative/template/dir1/file2.tmpl @@ -0,0 +1 @@ +{{skip "b"}} diff --git a/libs/template/testdata/skip-is-relative/template/file1.tmpl b/libs/template/testdata/skip-is-relative/template/file1.tmpl new file mode 100644 index 000000000..b74590a78 --- /dev/null +++ b/libs/template/testdata/skip-is-relative/template/file1.tmpl @@ -0,0 +1 @@ +{{skip "a"}} diff --git a/libs/template/testdata/skip/template/dir1/file4 b/libs/template/testdata/skip/template/dir1/file4 new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/skip/template/dir1/file5 b/libs/template/testdata/skip/template/dir1/file5 new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/skip/template/dir2/file6 b/libs/template/testdata/skip/template/dir2/file6 new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/skip/template/file1.tmpl b/libs/template/testdata/skip/template/file1.tmpl new file mode 100644 index 000000000..9c8752690 --- /dev/null +++ b/libs/template/testdata/skip/template/file1.tmpl @@ -0,0 +1 @@ +{{skip "file3"}} diff --git a/libs/template/testdata/skip/template/file2.tmpl b/libs/template/testdata/skip/template/file2.tmpl new file mode 100644 index 000000000..75db13eab --- /dev/null +++ b/libs/template/testdata/skip/template/file2.tmpl @@ -0,0 +1,2 @@ +{{skip "dir1/file4"}} +{{skip "dir2/*"}} diff --git a/libs/template/testdata/skip/template/file3 b/libs/template/testdata/skip/template/file3 new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/libs/template/testdata/skip/template/file3 @@ -0,0 +1 @@ + diff --git a/libs/template/testdata/template-in-path/library/my_funcs.tmpl b/libs/template/testdata/template-in-path/library/my_funcs.tmpl new file mode 100644 index 000000000..3415ad774 --- /dev/null +++ b/libs/template/testdata/template-in-path/library/my_funcs.tmpl @@ -0,0 +1,7 @@ +{{define "dir_name" -}} +my_directory +{{- end}} + +{{define "file_name" -}} +my_file +{{- end}} diff --git a/libs/template/testdata/template-in-path/template/{{template `dir_name`}}/{{template `file_name`}} b/libs/template/testdata/template-in-path/template/{{template `dir_name`}}/{{template `file_name`}} new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/urlparse-function/template/hello.tmpl b/libs/template/testdata/urlparse-function/template/hello.tmpl new file mode 100644 index 000000000..c365284b2 --- /dev/null +++ b/libs/template/testdata/urlparse-function/template/hello.tmpl @@ -0,0 +1,3 @@ +{{ with url "https://www.databricks.com/a/b?o=123#my-fragment" -}} +{{- print .Scheme `://` .Host -}} +{{- end -}} diff --git a/libs/template/testdata/walk/template/dir1/dir3/file3 b/libs/template/testdata/walk/template/dir1/dir3/file3 new file mode 100644 index 000000000..8662caa51 --- /dev/null +++ 
b/libs/template/testdata/walk/template/dir1/dir3/file3 @@ -0,0 +1 @@ +file three diff --git a/libs/template/testdata/walk/template/dir2/file4.tmpl b/libs/template/testdata/walk/template/dir2/file4.tmpl new file mode 100644 index 000000000..53e66a681 --- /dev/null +++ b/libs/template/testdata/walk/template/dir2/file4.tmpl @@ -0,0 +1,5 @@ +{{if (eq 1 1)}} +file four +{{else}} +mathematics is a lie +{{end}} diff --git a/libs/template/testdata/walk/template/file1 b/libs/template/testdata/walk/template/file1 new file mode 100644 index 000000000..ce2b3df90 --- /dev/null +++ b/libs/template/testdata/walk/template/file1 @@ -0,0 +1 @@ +file one diff --git a/libs/template/testdata/walk/template/file2 b/libs/template/testdata/walk/template/file2 new file mode 100644 index 000000000..6c970dbb4 --- /dev/null +++ b/libs/template/testdata/walk/template/file2 @@ -0,0 +1 @@ +file two diff --git a/libs/template/testdata/workspace-host/template/file.tmpl b/libs/template/testdata/workspace-host/template/file.tmpl new file mode 100644 index 000000000..2098e41b4 --- /dev/null +++ b/libs/template/testdata/workspace-host/template/file.tmpl @@ -0,0 +1,2 @@ +{{workspace_host}} +{{smallest_node_type}} diff --git a/main.go b/main.go index 959c9b295..8c8516d9d 100644 --- a/main.go +++ b/main.go @@ -1,19 +1,12 @@ package main import ( - _ "github.com/databricks/cli/cmd/account" - _ "github.com/databricks/cli/cmd/api" - _ "github.com/databricks/cli/cmd/auth" - _ "github.com/databricks/cli/cmd/bundle" - _ "github.com/databricks/cli/cmd/bundle/debug" - _ "github.com/databricks/cli/cmd/configure" - _ "github.com/databricks/cli/cmd/fs" + "context" + + "github.com/databricks/cli/cmd" "github.com/databricks/cli/cmd/root" - _ "github.com/databricks/cli/cmd/sync" - _ "github.com/databricks/cli/cmd/version" - _ "github.com/databricks/cli/cmd/workspace" ) func main() { - root.Execute() + root.Execute(cmd.New(context.Background())) } diff --git a/main_test.go b/main_test.go index 4c7a8ebc3..34ecdca0f 100644 --- a/main_test.go +++ b/main_test.go @@ -1,9 +1,10 @@ package main import ( + "context" "testing" - "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd" "github.com/spf13/cobra" "github.com/stretchr/testify/assert" ) @@ -15,7 +16,7 @@ func TestCommandsDontUseUnderscoreInName(t *testing.T) { // This test lives in the main package because this is where // all commands are imported. 
// - queue := []*cobra.Command{root.RootCmd} + queue := []*cobra.Command{cmd.New(context.Background())} for len(queue) > 0 { cmd := queue[0] assert.NotContains(t, cmd.Name(), "_") diff --git a/python/runner.go b/python/runner.go index 6145da277..b2946b297 100644 --- a/python/runner.go +++ b/python/runner.go @@ -15,7 +15,7 @@ func PyInline(ctx context.Context, inlinePy string) (string, error) { } func Py(ctx context.Context, script string, args ...string) (string, error) { - py, err := detectExecutable(ctx) + py, err := DetectExecutable(ctx) if err != nil { return "", err } @@ -70,7 +70,7 @@ func detectVirtualEnv() (string, error) { var pyExec string -func detectExecutable(ctx context.Context) (string, error) { +func DetectExecutable(ctx context.Context) (string, error) { if pyExec != "" { return pyExec, nil } diff --git a/python/runner_test.go b/python/runner_test.go index 321a1b7dc..b43d218ce 100644 --- a/python/runner_test.go +++ b/python/runner_test.go @@ -25,14 +25,14 @@ func TestExecAndPassError(t *testing.T) { func TestDetectPython(t *testing.T) { pyExec = "" - py, err := detectExecutable(context.Background()) + py, err := DetectExecutable(context.Background()) assert.NoError(t, err) assert.Contains(t, py, "python3") } func TestDetectPythonCache(t *testing.T) { pyExec = "abc" - py, err := detectExecutable(context.Background()) + py, err := DetectExecutable(context.Background()) assert.NoError(t, err) assert.Equal(t, "abc", py) pyExec = "" @@ -82,7 +82,7 @@ func TestPyInline(t *testing.T) { } func TestPyInlineStderr(t *testing.T) { - detectExecutable(context.Background()) + DetectExecutable(context.Background()) inline := "import sys; sys.stderr.write('___msg___'); sys.exit(1)" _, err := PyInline(context.Background(), inline) assert.EqualError(t, err, "___msg___") diff --git a/python/utils.go b/python/utils.go new file mode 100644 index 000000000..47d5462d2 --- /dev/null +++ b/python/utils.go @@ -0,0 +1,50 @@ +package python + +// TODO: move this package into the libs + +import ( + "context" + "os" + "path/filepath" + "strings" + + "github.com/databricks/cli/libs/log" +) + +func CleanupWheelFolder(dir string) { + // there or not there - we don't care + os.RemoveAll(filepath.Join(dir, "__pycache__")) + os.RemoveAll(filepath.Join(dir, "build")) + eggInfo := FindFilesWithSuffixInPath(dir, ".egg-info") + if len(eggInfo) == 0 { + return + } + for _, f := range eggInfo { + os.RemoveAll(f) + } +} + +func FindFilesWithSuffixInPath(dir, suffix string) []string { + f, err := os.Open(dir) + if err != nil { + log.Debugf(context.Background(), "open dir %s: %s", dir, err) + return nil + } + defer f.Close() + + entries, err := f.ReadDir(0) + if err != nil { + log.Debugf(context.Background(), "read dir %s: %s", dir, err) + // todo: log + return nil + } + + files := make([]string, 0) + for _, child := range entries { + if !strings.HasSuffix(child.Name(), suffix) { + continue + } + files = append(files, filepath.Join(dir, child.Name())) + } + return files +} diff --git a/python/utils_test.go b/python/utils_test.go new file mode 100644 index 000000000..1656d1ecb --- /dev/null +++ b/python/utils_test.go @@ -0,0 +1,21 @@ +package python + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFindFilesWithSuffixInPath(t *testing.T) { + dir, err := os.Getwd() + require.NoError(t, err) + + files := FindFilesWithSuffixInPath(dir, "test.go") + + matches, err := filepath.Glob(filepath.Join(dir, "*test.go")) + require.NoError(t, err) + + require.ElementsMatch(t, 
files, matches) +} diff --git a/python/wheel.go b/python/wheel.go index ff05509dc..39c3d4cb4 100644 --- a/python/wheel.go +++ b/python/wheel.go @@ -6,7 +6,6 @@ import ( "io" "os" "path" - "strings" "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go" @@ -18,7 +17,7 @@ func BuildWheel(ctx context.Context, dir string) (string, error) { // remove previous dist leak os.RemoveAll("dist") // remove all other irrelevant traces - silentlyCleanupWheelFolder(".") + CleanupWheelFolder(".") // call simple wheel builder. we may need to pip install wheel as well out, err := Py(ctx, "setup.py", "bdist_wheel") if err != nil { @@ -27,13 +26,16 @@ func BuildWheel(ctx context.Context, dir string) (string, error) { log.Debugf(ctx, "Built wheel: %s", out) // and cleanup afterwards - silentlyCleanupWheelFolder(".") + CleanupWheelFolder(".") - wheel := silentChildWithSuffix("dist", ".whl") - if wheel == "" { + wheels := FindFilesWithSuffixInPath("dist", ".whl") + if len(wheels) == 0 { return "", fmt.Errorf("cannot find built wheel in %s", dir) } - return path.Join(dir, wheel), nil + if len(wheels) != 1 { + return "", fmt.Errorf("more than 1 wheel file found in %s", dir) + } + return path.Join(dir, wheels[0]), nil } const DBFSWheelLocation = "dbfs:/FileStore/wheels/simple" @@ -82,38 +84,6 @@ func UploadWheelToDBFSWithPEP503(ctx context.Context, dir string) (string, error return dbfsLoc, err } -func silentlyCleanupWheelFolder(dir string) { - // there or not there - we don't care - os.RemoveAll(path.Join(dir, "__pycache__")) - os.RemoveAll(path.Join(dir, "build")) - eggInfo := silentChildWithSuffix(dir, ".egg-info") - if eggInfo == "" { - return - } - os.RemoveAll(eggInfo) -} - -func silentChildWithSuffix(dir, suffix string) string { - f, err := os.Open(dir) - if err != nil { - log.Debugf(context.Background(), "open dir %s: %s", dir, err) - return "" - } - entries, err := f.ReadDir(0) - if err != nil { - log.Debugf(context.Background(), "read dir %s: %s", dir, err) - // todo: log - return "" - } - for _, child := range entries { - if !strings.HasSuffix(child.Name(), suffix) { - continue - } - return path.Join(dir, child.Name()) - } - return "" -} - func chdirAndBack(dir string) func() { wd, _ := os.Getwd() os.Chdir(dir)