Shreyas Goenka 2025-03-10 15:29:28 +01:00
commit 3172424150
GPG Key ID: 92A07DF49CCB0622
166 changed files with 6962 additions and 6102 deletions

View File

@ -241,7 +241,7 @@ func new{{.PascalName}}() *cobra.Command {
cmd.PreRunE = root.Must{{if .Service.IsAccounts}}Account{{else}}Workspace{{end}}Client
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
{{if .Service.IsAccounts}}a := command.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
{{if .Service.IsAccounts}}a := command.AccountClient(ctx){{else}}w := command.WorkspaceClient(ctx){{end}}
{{- if .Request }}
{{ if $canUseJson }}
if cmd.Flags().Changed("json") {

View File

@ -8,7 +8,5 @@ For example, were there any decisions behind the change that are not reflected i
## Tests
<!-- How have you tested the changes? -->
---
<!-- If your PR needs to be included in the release notes for next release,
remove the line below and add a separate entry in NEXT_CHANGELOG.md as part of your PR. -->
NO_CHANGELOG=true
add a separate entry in NEXT_CHANGELOG.md as part of your PR. -->

View File

@ -1,105 +0,0 @@
# Generated file. DO NOT EDIT.
name: Check for NEXT_CHANGELOG.md Changes
on:
# Use pull_request_target to have access to GitHub API
pull_request_target:
jobs:
check-next-changelog:
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Fetch list of changed files
id: changed-files
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Use the GitHub API to fetch changed files
files=$(gh pr view ${{ github.event.pull_request.number }} --json files -q '.files[].path')
# Sanitize to avoid code injection
sanitized_files=$(echo "$files" | sed 's/[^a-zA-Z0-9._/-]/_/g')
# Store the sanitized list of files in a temporary file to avoid env variable issues
echo "$sanitized_files" > modified_files.txt
- name: Fetch PR message
id: pr-message
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Use the GitHub API to fetch the PR message
pr_message=$(gh pr view ${{ github.event.pull_request.number }} --json body -q '.body')
# Sanitize the PR message to avoid code injection, keeping the equal sign
sanitized_pr_message=$(echo "$pr_message" | sed 's/[^a-zA-Z0-9._/-=]/_/g')
# Store the sanitized PR message
echo "$sanitized_pr_message" > pr_message.txt
- name: Verify NEXT_CHANGELOG.md was modified or PR message contains NO_CHANGELOG=true
run: |
# Read the sanitized files and PR message from the temporary files
modified_files=$(cat modified_files.txt)
pr_message=$(cat pr_message.txt)
# Check if NEXT_CHANGELOG.md exists in the list of changed files
echo "Changed files: $modified_files"
if ! echo "$modified_files" | grep -q "NEXT_CHANGELOG.md"; then
echo "NEXT_CHANGELOG.md not modified."
# Check if PR message contains NO_CHANGELOG=true
if echo "$pr_message" | grep -q "NO_CHANGELOG=true"; then
echo "NO_CHANGELOG=true found in PR message. Skipping changelog check."
exit 0
else
echo "WARNING: file NEXT_CHANGELOG.md not changed. If this is expected, add NO_CHANGELOG=true to the PR message."
exit 1
fi
fi
- name: Comment on PR with instructions if needed
if: failure() # This step will only run if the previous step fails (i.e., if NEXT_CHANGELOG.md was not modified and NO_CHANGELOG=true was not in the PR message)
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Check if a comment exists with the instructions
previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \
--jq '.[] | select(.body | startswith("<!-- NEXT_CHANGELOG_INSTRUCTIONS -->")) | .id')
echo "Previous comment IDs: $previous_comment_ids"
# If no previous comment exists, add one with instructions
if [ -z "$previous_comment_ids" ]; then
echo "Adding instructions comment."
gh pr comment ${{ github.event.pull_request.number }} --body \
"<!-- NEXT_CHANGELOG_INSTRUCTIONS -->
Please ensure that the NEXT_CHANGELOG.md file is updated with any relevant changes.
If this is not necessary for your PR, please include the following in your PR description:
NO_CHANGELOG=true
and rerun the job."
fi
- name: Delete instructions comment on success
if: success() # This step will only run if the previous check passed (i.e., if NEXT_CHANGELOG.md was modified or NO_CHANGELOG=true is in the PR message)
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Check if there is a previous instructions comment
previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \
--jq '.[] | select(.body | startswith("<!-- NEXT_CHANGELOG_INSTRUCTIONS -->")) | .id')
# If a comment exists, delete it
if [ -n "$previous_comment_ids" ]; then
echo "Deleting previous instructions comment."
for comment_id in $previous_comment_ids; do
gh api "repos/${{ github.repository }}/issues/comments/$comment_id" --method DELETE
done
else
echo "No instructions comment found to delete."
fi

View File

@ -3,6 +3,7 @@
## Release v0.243.1
### CLI
* Fixed "can't evaluate field Name in type interface{}" for "databricks queries list" command ([#2451](https://github.com/databricks/cli/pull/2451))
### Bundles

View File

@ -223,6 +223,10 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
if !isTruePtr(config.Cloud) && cloudEnv != "" {
t.Skipf("Disabled via Cloud setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
} else {
if isTruePtr(config.RequiresUnityCatalog) && os.Getenv("TEST_METASTORE_ID") == "" {
t.Skipf("Skipping on non-UC workspaces")
}
}
var tmpDir string

View File

@ -0,0 +1,10 @@
bundle:
name: bind-schema-test-$BUNDLE_NAME_SUFFIX
resources:
schemas:
schema1:
name: $SCHEMA_NAME
catalog_name: main
comment: This schema was created from DABs

View File

@ -0,0 +1,38 @@
=== Bind schema test:
=== Substitute variables in the template:
=== Create a pre-defined schema: {
"full_name": "main.test-schema-[UUID]",
"catalog_name": "main",
"comment": null
}
=== Bind schema: Updating deployment state...
Successfully bound databricks_schema with an id 'main.test-schema-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace
=== Deploy bundle: Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-schema-test-[UUID]/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
=== Read the pre-defined schema: {
"full_name": "main.test-schema-[UUID]",
"catalog_name": "main",
"comment": "This schema was created from DABs"
}
=== Unbind the schema: Updating deployment state...
=== Destroy the bundle: All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/bind-schema-test-[UUID]/default
Deleting files...
Destroy complete!
=== Read the pre-defined schema again (expecting it still exists): {
"full_name": "main.test-schema-[UUID]",
"catalog_name": "main",
"comment": "This schema was created from DABs"
}
=== Test cleanup:
=== Delete the pre-defined schema test-schema-[UUID]: 0

View File

@ -0,0 +1,36 @@
title "Bind schema test: "
title "Substitute variables in the template: "
export BUNDLE_NAME_SUFFIX=$(uuid)
export SCHEMA_NAME="test-schema-$(uuid)"
envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
title "Create a pre-defined schema: "
CATALOG_NAME=main
$CLI schemas create ${SCHEMA_NAME} ${CATALOG_NAME} | jq '{full_name, catalog_name, comment}'
cleanupRemoveSchema() {
title "Test cleanup: "
title "Delete the pre-defined schema ${SCHEMA_NAME}: "
$CLI schemas delete ${CATALOG_NAME}.${SCHEMA_NAME}
echo $?
}
trap cleanupRemoveSchema EXIT
title "Bind schema: "
$CLI bundle deployment bind schema1 ${CATALOG_NAME}.${SCHEMA_NAME} --auto-approve
title "Deploy bundle: "
$CLI bundle deploy --force-lock --auto-approve
title "Read the pre-defined schema: "
$CLI schemas get ${CATALOG_NAME}.${SCHEMA_NAME} | jq '{full_name, catalog_name, comment}'
title "Unbind the schema: "
$CLI bundle deployment unbind schema1
title "Destroy the bundle: "
$CLI bundle destroy --auto-approve
title "Read the pre-defined schema again (expecting it still exists): "
$CLI schemas get ${CATALOG_NAME}.${SCHEMA_NAME} | jq '{full_name, catalog_name, comment}'

View File

@ -0,0 +1,3 @@
Local = false
Cloud = true
RequiresUnityCatalog = true

View File

@ -0,0 +1,5 @@
>>> [CLI] queries list
ID Name Author
[UUID] Example query 1 user@acme.com
[UUID] Example query 2 user@acme.com

View File

@ -0,0 +1 @@
trace $CLI queries list

View File

@ -0,0 +1,58 @@
[[Server]]
Pattern = "GET /api/2.0/sql/queries"
Response.Body = '''
{
"results": [
{
"description": "Example description",
"owner_user_name": "user@acme.com",
"tags": [
"Tag 1"
],
"display_name": "Example query 1",
"id": "ae25e731-92f2-4838-9fb2-1ca364320a3d",
"lifecycle_state": "ACTIVE",
"last_modifier_user_name": "user@acme.com",
"query_text": "SELECT 1",
"parameters": [
{
"name": "foo",
"text_value": {
"value": "bar"
},
"title": "foo"
}
],
"update_time": "2019-08-24T14:15:22Z",
"warehouse_id": "a7066a8ef796be84",
"run_as_mode": "OWNER",
"create_time": "2019-08-24T14:15:22Z"
},
{
"description": "Example description",
"owner_user_name": "user@acme.com",
"tags": [
"Tag 1"
],
"display_name": "Example query 2",
"id": "be25e731-92f2-4838-9fb2-1ca364320a3d",
"lifecycle_state": "ACTIVE",
"last_modifier_user_name": "user@acme.com",
"query_text": "SELECT 1",
"parameters": [
{
"name": "foo",
"text_value": {
"value": "bar"
},
"title": "foo"
}
],
"update_time": "2019-08-24T14:15:22Z",
"warehouse_id": "a7066a8ef796be84",
"run_as_mode": "OWNER",
"create_time": "2019-08-24T14:15:22Z"
}
]
}
'''

View File

@ -31,6 +31,9 @@ type TestConfig struct {
// If true, run this test when running with cloud env configured
Cloud *bool
// If true and Cloud=true, run this test only if Unity Catalog is available in the cloud environment
RequiresUnityCatalog *bool
// List of additional replacements to apply on this test.
// Old is a regexp, New is a replacement expression.
Repls []testdiff.Replacement

View File

@ -59,3 +59,7 @@ withdir() {
cd "$orig_dir" || return $?
return $exit_code
}
uuid() {
python3 -c 'import uuid; print(uuid.uuid4())'
}

View File

@ -7,6 +7,7 @@ import (
"path"
"reflect"
"strings"
"time"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/internal/annotation"
@ -43,7 +44,7 @@ func main() {
[]string{path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, rootFileName),
reflect.TypeOf(config.Root{}),
string(rootHeader),
fillTemplateVariables(string(rootHeader)),
)
if err != nil {
log.Fatal(err)
@ -56,7 +57,7 @@ func main() {
[]string{path.Join(annotationDir, "annotations_openapi.yml"), path.Join(annotationDir, "annotations_openapi_overrides.yml"), path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, resourcesFileName),
reflect.TypeOf(config.Resources{}),
string(resourcesHeader),
fillTemplateVariables(string(resourcesHeader)),
)
if err != nil {
log.Fatal(err)
@ -133,3 +134,8 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
s.Examples = []string{a.MarkdownExamples}
}
}
func fillTemplateVariables(s string) string {
currentDate := time.Now().Format("2006-01-02")
return strings.ReplaceAll(s, "{{update_date}}", currentDate)
}
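A minimal sketch (not part of this commit) of the substitution the new helper performs on the generated docs headers shown further down; the header literal is illustrative:
header := "date: {{update_date}}"
fmt.Println(fillTemplateVariables(header)) // on 2025-03-10 this prints "date: 2025-03-10"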

View File

@ -12,7 +12,7 @@ func buildMarkdown(nodes []rootNode, outputFile, header string) error {
m = m.PlainText(header)
for _, node := range nodes {
m = m.LF()
title := escapeBrackets(node.Title)
title := node.Title
if node.TopLevel {
m = m.H2(title)
} else {
@ -68,21 +68,24 @@ func pickLastWord(s string) string {
// Build a custom table which we use in Databricks website
func buildAttributeTable(m *markdownRenderer, attributes []attributeNode) *markdownRenderer {
m = m.LF()
m = m.PlainText(".. list-table::")
m = m.PlainText(" :header-rows: 1")
m = m.PlainText(":::list-table")
m = m.LF()
m = m.PlainText(" * - Key")
m = m.PlainText(" - Type")
m = m.PlainText(" - Description")
m = m.PlainText("- - Key")
m = m.PlainText(" - Type")
m = m.PlainText(" - Description")
m = m.LF()
for _, a := range attributes {
m = m.PlainText(" * - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a))
m = m.PlainText("- - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a))
m = m.LF()
}
m = m.PlainText(":::")
m = m.LF()
return m
}
@ -94,7 +97,7 @@ func formatDescription(a attributeNode) string {
} else if s != "" {
s += ". "
}
s += fmt.Sprintf("See [_](#%s).", cleanAnchor(a.Link))
s += fmt.Sprintf("See [\\_](#%s).", cleanAnchor(a.Link))
}
return s
}
@ -102,15 +105,7 @@ func formatDescription(a attributeNode) string {
// Docs framework does not allow special characters in anchor links and strip them out by default
// We need to clean them up to make sure the links pass the validation
func cleanAnchor(s string) string {
s = strings.ReplaceAll(s, "<", "")
s = strings.ReplaceAll(s, ">", "")
s = strings.ReplaceAll(s, ".", "")
return s
}
func escapeBrackets(s string) string {
s = strings.ReplaceAll(s, "<", "\\<")
s = strings.ReplaceAll(s, ">", "\\>")
s = strings.ReplaceAll(s, nameFieldWithFormat, nameField)
return s
}

View File

@ -20,12 +20,12 @@ func TestBuildMarkdownAnchors(t *testing.T) {
Title: "my_attribute",
Type: "Map",
Description: "Desc with link",
Link: "some_field.<name>.my_attribute",
Link: "some_field._name_.my_attribute",
},
},
},
{
Title: "some_field.<name>.my_attribute",
Title: "some_field._name_.my_attribute",
TopLevel: false,
Type: "Boolean",
Description: "Another description",

View File

@ -137,8 +137,13 @@ func getMapValueType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema) *
return nil
}
const (
nameField = "name"
nameFieldWithFormat = "_name_"
)
func getMapKeyPrefix(s string) string {
return s + ".<name>"
return s + "." + nameFieldWithFormat
}
func removePluralForm(s string) string {

View File

@ -93,11 +93,11 @@ func TestBuildNodes_ChildExpansion(t *testing.T) {
TopLevel: true,
Type: "Map",
ObjectKeyAttributes: []attributeNode{
{Title: "mapSub", Type: "Map", Link: "myMap.<name>.mapSub"},
{Title: "mapSub", Type: "Map", Link: "myMap._name_.mapSub"},
},
},
{
Title: "myMap.<name>.mapSub",
Title: "myMap._name_.mapSub",
Type: "Map",
Attributes: []attributeNode{
{Title: "deepSub", Type: "Boolean"},

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,7 +1,7 @@
---
description: 'Configuration reference for databricks.yml'
last_update:
date: 2025-02-14
date: {{update_date}}
---
<!--DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli-->

View File

@ -1,7 +1,7 @@
---
description: 'Learn about resources supported by Databricks Asset Bundles and how to configure them.'
last_update:
date: 2025-02-14
date: {{update_date}}
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->

View File

@ -8,19 +8,20 @@ This is a description
.. list-table::
:header-rows: 1
:::list-table
* - Key
- Type
- Description
- - Key
- Type
- Description
* - `my_attribute`
- Map
- Desc with link. See [_](#some_fieldnamemy_attribute).
- - `my_attribute`
- Map
- Desc with link. See [\_](#some_fieldnamemy_attribute).
:::
### some_field.\<name\>.my_attribute
### some_field._name_.my_attribute
**`Type: Boolean`**

View File

@ -188,9 +188,9 @@ github.com/databricks/cli/bundle/config.Resources:
The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [_](/dev-tools/bundles/resources.md#quality_monitors).
"registered_models":
"description": |-
The registered model definitions for the bundle, where each key is the name of the <UC> registered model.
The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.
"markdown_description": |-
The registered model definitions for the bundle, where each key is the name of the <UC> registered model. See [_](/dev-tools/bundles/resources.md#registered_models).
The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [_](/dev-tools/bundles/resources.md#registered_models)
"schemas":
"description": |-
The schema definitions for the bundle, where each key is the name of the schema.
@ -261,7 +261,7 @@ github.com/databricks/cli/bundle/config.Root:
"description": |-
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.
"markdown_description": |-
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about <DABS> supported resources, and resource definition reference, see [_](/dev-tools/bundles/resources.md).
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [_](/dev-tools/bundles/resources.md).
```yaml
resources:
@ -271,9 +271,9 @@ github.com/databricks/cli/bundle/config.Root:
```
"run_as":
"description": |-
The identity to use when running <DABS> workflows.
The identity to use when running Databricks Asset Bundles workflows.
"markdown_description": |-
The identity to use when running <DABS> workflows. See [_](/dev-tools/bundles/run-as.md).
The identity to use when running Databricks Asset Bundles workflows. See [_](/dev-tools/bundles/run-as.md).
"sync":
"description": |-
The files and file paths to include or exclude in the bundle.

View File

@ -140,7 +140,7 @@ github.com/databricks/cli/bundle/config/resources.Dashboard:
github.com/databricks/cli/bundle/config/resources.Job:
"_":
"markdown_description": |-
The job resource allows you to define [jobs and their corresponding tasks](/api/workspace/jobs/create) in your bundle. For information about jobs, see [_](/jobs/index.md). For a tutorial that uses a <DABS> template to create a job, see [_](/dev-tools/bundles/jobs-tutorial.md).
The job resource allows you to define [jobs and their corresponding tasks](/api/workspace/jobs/create) in your bundle. For information about jobs, see [_](/jobs/index.md). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [_](/dev-tools/bundles/jobs-tutorial.md).
"markdown_examples": |-
The following example defines a job with the resource key `hello-job` with one notebook task:
@ -188,7 +188,7 @@ github.com/databricks/cli/bundle/config/resources.MlflowExperiment:
github.com/databricks/cli/bundle/config/resources.MlflowModel:
"_":
"markdown_description": |-
The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use <UC> [registered models](#registered-model) instead.
The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](#registered-model) instead.
"permissions":
"description": |-
PLACEHOLDER
@ -197,7 +197,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
"markdown_description": |-
The model_serving_endpoint resource allows you to define [model serving endpoints](/api/workspace/servingendpoints/create). See [_](/machine-learning/model-serving/manage-serving-endpoints.md).
"markdown_examples": |-
The following example defines a <UC> model serving endpoint:
The following example defines a Unity Catalog model serving endpoint:
```yaml
resources:
@ -224,7 +224,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
github.com/databricks/cli/bundle/config/resources.Pipeline:
"_":
"markdown_description": |-
The pipeline resource allows you to create <DLT> [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/delta-live-tables/index.md). For a tutorial that uses the <DABS> template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
The pipeline resource allows you to create Delta Live Tables [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/dlt/index.md). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
"markdown_examples": |-
The following example defines a pipeline with the resource key `hello-pipeline`:
@ -257,7 +257,7 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
github.com/databricks/cli/bundle/config/resources.QualityMonitor:
"_":
"markdown_description": |-
The quality_monitor resource allows you to define a <UC> [table monitor](/api/workspace/qualitymonitors/create). For information about monitors, see [_](/machine-learning/model-serving/monitor-diagnose-endpoints.md).
The quality_monitor resource allows you to define a Unity Catalog [table monitor](/api/workspace/qualitymonitors/create). For information about monitors, see [_](/machine-learning/model-serving/monitor-diagnose-endpoints.md).
"markdown_examples": |-
The following example defines a quality monitor:
@ -285,9 +285,9 @@ github.com/databricks/cli/bundle/config/resources.QualityMonitor:
github.com/databricks/cli/bundle/config/resources.RegisteredModel:
"_":
"markdown_description": |-
The registered model resource allows you to define models in <UC>. For information about <UC> [registered models](/api/workspace/registeredmodels/create), see [_](/machine-learning/manage-model-lifecycle/index.md).
The registered model resource allows you to define models in Unity Catalog. For information about Unity Catalog [registered models](/api/workspace/registeredmodels/create), see [_](/machine-learning/manage-model-lifecycle/index.md).
"markdown_examples": |-
The following example defines a registered model in <UC>:
The following example defines a registered model in Unity Catalog:
```yaml
resources:
@ -308,12 +308,12 @@ github.com/databricks/cli/bundle/config/resources.RegisteredModel:
github.com/databricks/cli/bundle/config/resources.Schema:
"_":
"markdown_description": |-
The schema resource type allows you to define <UC> [schemas](/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:
The schema resource type allows you to define Unity Catalog [schemas](/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:
- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.
- Only fields supported by the corresponding [Schemas object create API](/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](/api/workspace/schemas/update).
"markdown_examples": |-
The following example defines a pipeline with the resource key `my_pipeline` that creates a <UC> schema with the key `my_schema` as the target:
The following example defines a pipeline with the resource key `my_pipeline` that creates a Unity Catalog schema with the key `my_schema` as the target:
```yaml
resources:
@ -334,9 +334,9 @@ github.com/databricks/cli/bundle/config/resources.Schema:
comment: This schema was created by DABs.
```
A top-level grants mapping is not supported by <DABS>, so if you want to set grants for a schema, define the grants for the schema within the `schemas` mapping. For more information about grants, see [_](/data-governance/unity-catalog/manage-privileges/index.md#grant).
A top-level grants mapping is not supported by Databricks Asset Bundles, so if you want to set grants for a schema, define the grants for the schema within the `schemas` mapping. For more information about grants, see [_](/data-governance/unity-catalog/manage-privileges/index.md#grant).
The following example defines a <UC> schema with grants:
The following example defines a Unity Catalog schema with grants:
```yaml
resources:
@ -361,13 +361,13 @@ github.com/databricks/cli/bundle/config/resources.Schema:
github.com/databricks/cli/bundle/config/resources.Volume:
"_":
"markdown_description": |-
The volume resource type allows you to define and create <UC> [volumes](/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:
The volume resource type allows you to define and create Unity Catalog [volumes](/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:
- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use <DABS> to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.
- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use Databricks Asset Bundles to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.
- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [_](/dev-tools/bundles/deployment-modes.md#custom-presets).
"markdown_examples": |-
The following example creates a <UC> volume with the key `my_volume`:
The following example creates a Unity Catalog volume with the key `my_volume`:
```yaml
resources:
@ -378,7 +378,7 @@ github.com/databricks/cli/bundle/config/resources.Volume:
schema_name: my_schema
```
For an example bundle that runs a job that writes to a file in <UC> volume, see the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples/tree/main/knowledge_base/write_from_job_to_volume).
For an example bundle that runs a job that writes to a file in Unity Catalog volume, see the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples/tree/main/knowledge_base/write_from_job_to_volume).
"grants":
"description": |-
PLACEHOLDER
@ -579,3 +579,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
"model_version":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo:
"abfss":
"description": |-
Contains the Azure Data Lake Storage destination path
github.com/databricks/databricks-sdk-go/service/compute.Environment:
"dependencies":
"description": |-
List of pip dependencies, as supported by the version of pip in this environment.
github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog:
"granularities":
"description": |-
Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.
github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries:
"granularities":
"description": |-
Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
"log_analytics_primary_key":
"description": |-
The primary key for the Azure Log Analytics agent configuration
"log_analytics_workspace_id":
"description": |-
The workspace ID for the Azure Log Analytics agent configuration

View File

@ -459,7 +459,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a \u003cDABS\u003e template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
},
{
"type": "string",
@ -552,7 +552,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use \u003cUC\u003e [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
},
{
"type": "string",
@ -743,7 +743,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The pipeline resource allows you to create \u003cDLT\u003e [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/delta-live-tables/index.html). For a tutorial that uses the \u003cDABS\u003e template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
"markdownDescription": "The pipeline resource allows you to create Delta Live Tables [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/dlt/index.html). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
},
{
"type": "string",
@ -818,7 +818,7 @@
"assets_dir",
"output_schema_name"
],
"markdownDescription": "The quality_monitor resource allows you to define a \u003cUC\u003e [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
"markdownDescription": "The quality_monitor resource allows you to define a Unity Catalog [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
},
{
"type": "string",
@ -861,7 +861,7 @@
"name",
"schema_name"
],
"markdownDescription": "The registered model resource allows you to define models in \u003cUC\u003e. For information about \u003cUC\u003e [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
"markdownDescription": "The registered model resource allows you to define models in Unity Catalog. For information about Unity Catalog [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
},
{
"type": "string",
@ -902,7 +902,7 @@
"catalog_name",
"name"
],
"markdownDescription": "The schema resource type allows you to define \u003cUC\u003e [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
"markdownDescription": "The schema resource type allows you to define Unity Catalog [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
},
{
"type": "string",
@ -948,7 +948,7 @@
"name",
"schema_name"
],
"markdownDescription": "The volume resource type allows you to define and create \u003cUC\u003e [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use \u003cDABS\u003e to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
"markdownDescription": "The volume resource type allows you to define and create Unity Catalog [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use Databricks Asset Bundles to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
},
{
"type": "string",
@ -1440,9 +1440,9 @@
"markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors)."
},
"registered_models": {
"description": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model.",
"description": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel",
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)."
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)"
},
"schemas": {
"description": "The schema definitions for the bundle, where each key is the name of the schema.",
@ -2126,7 +2126,7 @@
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.",
"$ref": "#/$defs/slice/string"
},
"label_col": {
@ -2283,7 +2283,7 @@
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.",
"$ref": "#/$defs/slice/string"
},
"timestamp_col": {
@ -2768,7 +2768,7 @@
"$ref": "#/$defs/string"
},
"dependencies": {
"description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e\nE.g. dependencies: [\"foo==0.0.1\", \"-r /Workspace/test/requirements.txt\"]",
"description": "List of pip dependencies, as supported by the version of pip in this environment.",
"$ref": "#/$defs/slice/string"
}
},
@ -2864,7 +2864,7 @@
"type": "object",
"properties": {
"abfss": {
"description": "destination needs to be provided. e.g.\n`{ \"abfss\" : { \"destination\" : \"abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e\" } }",
"description": "Contains the Azure Data Lake Storage destination path",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info"
},
"dbfs": {
@ -2972,11 +2972,11 @@
"type": "object",
"properties": {
"log_analytics_primary_key": {
"description": "\u003cneeds content added\u003e",
"description": "The primary key for the Azure Log Analytics agent configuration",
"$ref": "#/$defs/string"
},
"log_analytics_workspace_id": {
"description": "\u003cneeds content added\u003e",
"description": "The workspace ID for the Azure Log Analytics agent configuration",
"$ref": "#/$defs/string"
}
},
@ -7440,12 +7440,12 @@
"resources": {
"description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about \u003cDABS\u003e supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
},
"run_as": {
"description": "The identity to use when running \u003cDABS\u003e workflows.",
"description": "The identity to use when running Databricks Asset Bundles workflows.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
"markdownDescription": "The identity to use when running \u003cDABS\u003e workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
"markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
},
"sync": {
"description": "The files and file paths to include or exclude in the bundle.",

View File

@ -110,7 +110,7 @@ func getAuthStatus(cmd *cobra.Command, args []string, showSensitive bool, fn try
return &status, nil
}
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
me, err := w.CurrentUser.Me(ctx)
if err != nil {
return &authStatus{

View File

@ -18,7 +18,7 @@ import (
func TestGetWorkspaceAuthStatus(t *testing.T) {
ctx := context.Background()
m := mocks.NewMockWorkspaceClient(t)
ctx = root.SetWorkspaceClient(ctx, m.WorkspaceClient)
ctx = command.SetWorkspaceClient(ctx, m.WorkspaceClient)
cmd := &cobra.Command{}
cmd.SetContext(ctx)
@ -76,7 +76,7 @@ func TestGetWorkspaceAuthStatus(t *testing.T) {
func TestGetWorkspaceAuthStatusError(t *testing.T) {
ctx := context.Background()
m := mocks.NewMockWorkspaceClient(t)
ctx = root.SetWorkspaceClient(ctx, m.WorkspaceClient)
ctx = command.SetWorkspaceClient(ctx, m.WorkspaceClient)
cmd := &cobra.Command{}
cmd.SetContext(ctx)
@ -125,7 +125,7 @@ func TestGetWorkspaceAuthStatusError(t *testing.T) {
func TestGetWorkspaceAuthStatusSensitive(t *testing.T) {
ctx := context.Background()
m := mocks.NewMockWorkspaceClient(t)
ctx = root.SetWorkspaceClient(ctx, m.WorkspaceClient)
ctx = command.SetWorkspaceClient(ctx, m.WorkspaceClient)
cmd := &cobra.Command{}
cmd.SetContext(ctx)

View File

@ -8,6 +8,7 @@ import (
"time"
"github.com/databricks/cli/cmd"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/auth"
"github.com/databricks/cli/libs/auth/cache"
"github.com/databricks/cli/libs/databrickscfg/profile"
@ -106,7 +107,7 @@ func getCobraCmdForTest(f fixtures.HTTPFixture) (*cobra.Command, *bytes.Buffer)
func TestTokenCmdWithProfilePrintsHelpfulLoginMessageOnRefreshFailure(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshFailureTokenResponse)
cmd.SetArgs([]string{"auth", "token", "--profile", "expired"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
assert.Empty(t, out)
@ -117,7 +118,7 @@ func TestTokenCmdWithProfilePrintsHelpfulLoginMessageOnRefreshFailure(t *testing
func TestTokenCmdWithHostPrintsHelpfulLoginMessageOnRefreshFailure(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshFailureTokenResponse)
cmd.SetArgs([]string{"auth", "token", "--host", "https://accounts.cloud.databricks.com", "--account-id", "expired"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
assert.Empty(t, out)
@ -128,7 +129,7 @@ func TestTokenCmdWithHostPrintsHelpfulLoginMessageOnRefreshFailure(t *testing.T)
func TestTokenCmdInvalidResponse(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshFailureInvalidResponse)
cmd.SetArgs([]string{"auth", "token", "--profile", "active"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
assert.Empty(t, out)
@ -139,7 +140,7 @@ func TestTokenCmdInvalidResponse(t *testing.T) {
func TestTokenCmdOtherErrorResponse(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshFailureOtherError)
cmd.SetArgs([]string{"auth", "token", "--profile", "active"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
assert.Empty(t, out)
@ -150,7 +151,7 @@ func TestTokenCmdOtherErrorResponse(t *testing.T) {
func TestTokenCmdWithProfileSuccess(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshSuccessTokenResponse)
cmd.SetArgs([]string{"auth", "token", "--profile", "active"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
validateToken(t, out)
@ -160,7 +161,7 @@ func TestTokenCmdWithProfileSuccess(t *testing.T) {
func TestTokenCmdWithHostSuccess(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshSuccessTokenResponse)
cmd.SetArgs([]string{"auth", "token", "--host", "https://accounts.cloud.databricks.com", "--account-id", "expired"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
validateToken(t, out)

View File

@ -8,6 +8,7 @@ import (
"testing"
"github.com/databricks/cli/cmd"
"github.com/databricks/cli/cmd/root"
"github.com/stretchr/testify/assert"
"gopkg.in/ini.v1"
)
@ -57,7 +58,7 @@ func TestDefaultConfigureNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
cfgPath := filepath.Join(tempHomeDir, ".databrickscfg")
@ -91,7 +92,7 @@ func TestConfigFileFromEnvNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
_, err = os.Stat(cfgPath)
@ -131,7 +132,7 @@ func TestEnvVarsConfigureNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure", "--token"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
_, err = os.Stat(cfgPath)
@ -164,7 +165,7 @@ func TestEnvVarsConfigureNoArgsNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
_, err = os.Stat(cfgPath)
@ -193,7 +194,7 @@ func TestCustomProfileConfigureNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure", "--token", "--host", "https://host", "--profile", "CUSTOM"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
_, err = os.Stat(cfgPath)

View File

@ -7,6 +7,7 @@ import (
"strings"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/filer/completer"
"github.com/spf13/cobra"
@ -35,7 +36,7 @@ func filerForPath(ctx context.Context, fullPath string) (filer.Filer, string, er
}
path := parts[1]
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
// If the specified path has the "Volumes" prefix, use the Files API.
if strings.HasPrefix(path, "/Volumes/") {

View File

@ -6,7 +6,7 @@ import (
"strings"
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/fakefs"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
@ -73,7 +73,7 @@ func mockMustWorkspaceClientFunc(cmd *cobra.Command, args []string) error {
func setupCommand(t *testing.T) (*cobra.Command, *mocks.MockWorkspaceClient) {
m := mocks.NewMockWorkspaceClient(t)
ctx := context.Background()
ctx = root.SetWorkspaceClient(ctx, m.WorkspaceClient)
ctx = command.SetWorkspaceClient(ctx, m.WorkspaceClient)
cmd := &cobra.Command{}
cmd.SetContext(ctx)

View File

@ -13,25 +13,31 @@ const cacheTTL = 1 * time.Hour
// NewReleaseCache creates a release cache for a repository in the GitHub org.
// Caller has to provide different cache directories for different repositories.
func NewReleaseCache(org, repo, cacheDir string) *ReleaseCache {
func NewReleaseCache(org, repo, cacheDir string, offlineInstall bool) *ReleaseCache {
pattern := fmt.Sprintf("%s-%s-releases", org, repo)
return &ReleaseCache{
cache: localcache.NewLocalCache[Versions](cacheDir, pattern, cacheTTL),
Org: org,
Repo: repo,
cache: localcache.NewLocalCache[Versions](cacheDir, pattern, cacheTTL),
Org: org,
Repo: repo,
Offline: offlineInstall,
}
}
type ReleaseCache struct {
cache localcache.LocalCache[Versions]
Org string
Repo string
cache localcache.LocalCache[Versions]
Org string
Repo string
Offline bool
}
func (r *ReleaseCache) Load(ctx context.Context) (Versions, error) {
return r.cache.Load(ctx, func() (Versions, error) {
return getVersions(ctx, r.Org, r.Repo)
})
if !r.Offline {
return r.cache.Load(ctx, func() (Versions, error) {
return getVersions(ctx, r.Org, r.Repo)
})
}
cached, err := r.cache.LoadCache()
return cached.Data, err
}
// getVersions is considered to be a private API, as we want the usage to go through a cache
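A hedged sketch of how a caller exercises the new offline path (project name and cache directory are illustrative):
versions, err := github.NewReleaseCache("databrickslabs", "blueprint", cacheDir, true).Load(ctx)
// with Offline set, Load returns the previously cached data via LoadCache and never calls the GitHub releases API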

View File

@ -26,7 +26,7 @@ func TestLoadsReleasesForCLI(t *testing.T) {
ctx := context.Background()
ctx = WithApiOverride(ctx, server.URL)
r := NewReleaseCache("databricks", "cli", t.TempDir())
r := NewReleaseCache("databricks", "cli", t.TempDir(), false)
all, err := r.Load(ctx)
assert.NoError(t, err)
assert.Len(t, all, 2)

View File

@ -7,16 +7,20 @@ import (
)
func newInstallCommand() *cobra.Command {
return &cobra.Command{
Use: "install NAME",
Args: root.ExactArgs(1),
Short: "Installs project",
RunE: func(cmd *cobra.Command, args []string) error {
inst, err := project.NewInstaller(cmd, args[0])
if err != nil {
return err
}
return inst.Install(cmd.Context())
},
cmd := &cobra.Command{}
var offlineInstall bool
cmd.Flags().BoolVar(&offlineInstall, "offline", offlineInstall, `If installing in offline mode, set this flag to true.`)
cmd.Use = "install NAME"
cmd.Args = root.ExactArgs(1)
cmd.Short = "Installs project"
cmd.RunE = func(cmd *cobra.Command, args []string) error {
inst, err := project.NewInstaller(cmd, args[0], offlineInstall)
if err != nil {
return err
}
return inst.Install(cmd.Context())
}
return cmd
}

View File

@ -35,7 +35,7 @@ type LocalCache[T any] struct {
}
func (r *LocalCache[T]) Load(ctx context.Context, refresh func() (T, error)) (T, error) {
cached, err := r.loadCache()
cached, err := r.LoadCache()
if errors.Is(err, fs.ErrNotExist) {
return r.refreshCache(ctx, refresh, r.zero)
} else if err != nil {
@ -96,7 +96,7 @@ func (r *LocalCache[T]) FileName() string {
return filepath.Join(r.dir, r.name+".json")
}
func (r *LocalCache[T]) loadCache() (*cached[T], error) {
func (r *LocalCache[T]) LoadCache() (*cached[T], error) {
jsonFile := r.FileName()
raw, err := os.ReadFile(r.FileName())
if err != nil {

View File

@ -54,7 +54,7 @@ func (d *devInstallation) Install(ctx context.Context) error {
return d.Installer.runHook(d.Command)
}
func NewInstaller(cmd *cobra.Command, name string) (installable, error) {
func NewInstaller(cmd *cobra.Command, name string, offlineInstall bool) (installable, error) {
if name == "." {
wd, err := os.Getwd()
if err != nil {
@ -75,28 +75,32 @@ func NewInstaller(cmd *cobra.Command, name string) (installable, error) {
version = "latest"
}
f := &fetcher{name}
version, err := f.checkReleasedVersions(cmd, version)
version, err := f.checkReleasedVersions(cmd, version, offlineInstall)
if err != nil {
return nil, fmt.Errorf("version: %w", err)
}
prj, err := f.loadRemoteProjectDefinition(cmd, version)
prj, err := f.loadRemoteProjectDefinition(cmd, version, offlineInstall)
if err != nil {
return nil, fmt.Errorf("remote: %w", err)
}
return &installer{
Project: prj,
version: version,
cmd: cmd,
Project: prj,
version: version,
cmd: cmd,
offlineInstall: offlineInstall,
}, nil
}
func NewUpgrader(cmd *cobra.Command, name string) (*installer, error) {
f := &fetcher{name}
version, err := f.checkReleasedVersions(cmd, "latest")
version, err := f.checkReleasedVersions(cmd, "latest", false)
if err != nil {
return nil, fmt.Errorf("version: %w", err)
}
prj, err := f.loadRemoteProjectDefinition(cmd, version)
prj, err := f.loadRemoteProjectDefinition(cmd, version, false)
if err != nil {
return nil, fmt.Errorf("remote: %w", err)
}
@ -115,7 +119,7 @@ type fetcher struct {
name string
}
func (f *fetcher) checkReleasedVersions(cmd *cobra.Command, version string) (string, error) {
func (f *fetcher) checkReleasedVersions(cmd *cobra.Command, version string, offlineInstall bool) (string, error) {
ctx := cmd.Context()
cacheDir, err := PathInLabs(ctx, f.name, "cache")
if err != nil {
@ -123,7 +127,8 @@ func (f *fetcher) checkReleasedVersions(cmd *cobra.Command, version string) (str
}
// `databricks labs install X` doesn't know which exact version to fetch, so first
// we fetch all versions and then pick the latest one dynamically.
versions, err := github.NewReleaseCache("databrickslabs", f.name, cacheDir).Load(ctx)
var versions github.Versions
versions, err = github.NewReleaseCache("databrickslabs", f.name, cacheDir, offlineInstall).Load(ctx)
if err != nil {
return "", fmt.Errorf("versions: %w", err)
}
@ -140,11 +145,23 @@ func (f *fetcher) checkReleasedVersions(cmd *cobra.Command, version string) (str
return version, nil
}
func (i *fetcher) loadRemoteProjectDefinition(cmd *cobra.Command, version string) (*Project, error) {
func (i *fetcher) loadRemoteProjectDefinition(cmd *cobra.Command, version string, offlineInstall bool) (*Project, error) {
ctx := cmd.Context()
raw, err := github.ReadFileFromRef(ctx, "databrickslabs", i.name, version, "labs.yml")
if err != nil {
return nil, fmt.Errorf("read labs.yml from GitHub: %w", err)
var raw []byte
var err error
if !offlineInstall {
raw, err = github.ReadFileFromRef(ctx, "databrickslabs", i.name, version, "labs.yml")
if err != nil {
return nil, fmt.Errorf("read labs.yml from GitHub: %w", err)
}
} else {
libDir, _ := PathInLabs(ctx, i.name, "lib")
fileName := filepath.Join(libDir, "labs.yml")
raw, err = os.ReadFile(fileName)
if err != nil {
return nil, fmt.Errorf("read labs.yml from local path %s: %w", libDir, err)
}
}
return readFromBytes(ctx, raw)
}

View File

@ -76,7 +76,8 @@ type installer struct {
// command instance is used for:
// - auth profile flag override
// - standard input, output, and error streams
cmd *cobra.Command
cmd *cobra.Command
offlineInstall bool
}
func (i *installer) Install(ctx context.Context) error {
@ -101,9 +102,15 @@ func (i *installer) Install(ctx context.Context) error {
} else if err != nil {
return fmt.Errorf("login: %w", err)
}
err = i.downloadLibrary(ctx)
if err != nil {
return fmt.Errorf("lib: %w", err)
if !i.offlineInstall {
err = i.downloadLibrary(ctx)
if err != nil {
return fmt.Errorf("lib: %w", err)
}
}
if _, err := os.Stat(i.LibDir()); os.IsNotExist(err) {
return fmt.Errorf("no local installation found: %w", err)
}
err = i.setupPythonVirtualEnvironment(ctx, w)
if err != nil {

View File

@ -241,6 +241,45 @@ func TestInstallerWorksForReleases(t *testing.T) {
r.RunAndExpectOutput("setting up important infrastructure")
}
func TestOfflineInstallerWorksForReleases(t *testing.T) {
// This flag is useful on systems with restricted internet access. The expected setup is:
// install a labs project on a machine that has internet access,
// zip and copy the files to the target machine, and
// run `databricks labs install --offline=true`.
// It will look for the code in the same install directory and, if present, install from there.
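// A concrete sequence (project name and paths are illustrative, not prescribed by this change):
//   machine with internet:  databricks labs install blueprint
//   copy ~/.databricks/labs/blueprint over to the restricted machine
//   restricted machine:     databricks labs install blueprint --offline=true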
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/api/2.1/clusters/get" {
respondWithJSON(t, w, &compute.ClusterDetails{
State: compute.StateRunning,
})
return
}
t.Logf("Requested: %s", r.URL.Path)
t.FailNow()
}))
defer server.Close()
ctx := installerContext(t, server)
newHome := copyTestdata(t, "testdata/installed-in-home")
ctx = env.WithUserHomeDir(ctx, newHome)
ctx, stub := process.WithStub(ctx)
stub.WithStdoutFor(`python[\S]+ --version`, "Python 3.10.5")
// on Unix, we call `python3`, but on Windows it is `python.exe`
stub.WithStderrFor(`python[\S]+ -m venv .*/.databricks/labs/blueprint/state/venv`, "[mock venv create]")
stub.WithStderrFor(`python[\S]+ -m pip install --upgrade --upgrade-strategy eager .`, "[mock pip install]")
stub.WithStdoutFor(`python[\S]+ install.py`, "setting up important infrastructure")
// simulate the case of GitHub Actions
ctx = env.Set(ctx, "DATABRICKS_HOST", server.URL)
ctx = env.Set(ctx, "DATABRICKS_TOKEN", "...")
ctx = env.Set(ctx, "DATABRICKS_CLUSTER_ID", "installer-cluster")
ctx = env.Set(ctx, "DATABRICKS_WAREHOUSE_ID", "installer-warehouse")
r := testcli.NewRunner(t, ctx, "labs", "install", "blueprint", "--offline=true", "--debug")
r.RunAndExpectOutput("setting up important infrastructure")
}
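
The test drives the new --offline=true flag end to end; the flag registration itself is not part of the hunks shown here. A hypothetical sketch of how such a flag could be wired into the installer with cobra (the constructor, the stand-in installer type, and the flag description below are assumptions, not the actual cmd/labs code):

package main

import (
	"context"
	"fmt"

	"github.com/spf13/cobra"
)

// Minimal stand-in for the real installer; only the fields relevant to the flag.
type installer struct {
	cmd            *cobra.Command
	offlineInstall bool
}

func (i *installer) Install(ctx context.Context) error {
	if i.offlineInstall {
		fmt.Println("installing from the local directory")
		return nil
	}
	fmt.Println("downloading and installing")
	return nil
}

func newInstallCommand() *cobra.Command {
	var offline bool
	cmd := &cobra.Command{
		Use:  "install NAME",
		Args: cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			i := &installer{cmd: cmd, offlineInstall: offline}
			return i.Install(cmd.Context())
		},
	}
	cmd.Flags().BoolVar(&offline, "offline", false, "install from the local installation directory instead of downloading")
	return cmd
}

func main() {
	root := newInstallCommand()
	root.SetArgs([]string{"blueprint", "--offline=true"})
	_ = root.Execute()
}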
func TestInstallerWorksForDevelopment(t *testing.T) {
defer func() {
if !t.Failed() {

View File

@ -307,7 +307,7 @@ func (p *Project) checkUpdates(cmd *cobra.Command) error {
// might not be installed yet
return nil
}
r := github.NewReleaseCache("databrickslabs", p.Name, p.CacheDir())
r := github.NewReleaseCache("databrickslabs", p.Name, p.CacheDir(), false)
versions, err := r.Load(ctx)
if err != nil {
return err

View File

@ -17,7 +17,7 @@ import (
// Placeholders to use as unique keys in context.Context.
var (
workspaceClient int
accountClient int
)
type ErrNoWorkspaceProfiles struct {
@ -228,15 +228,11 @@ func MustWorkspaceClient(cmd *cobra.Command, args []string) error {
return err
}
ctx = context.WithValue(ctx, &workspaceClient, w)
ctx = command.SetWorkspaceClient(ctx, w)
cmd.SetContext(ctx)
return nil
}
func SetWorkspaceClient(ctx context.Context, w *databricks.WorkspaceClient) context.Context {
return context.WithValue(ctx, &workspaceClient, w)
}
func AskForWorkspaceProfile(ctx context.Context) (string, error) {
profiler := profile.GetProfiler(ctx)
path, err := profiler.GetPath(ctx)
@ -315,11 +311,3 @@ func emptyHttpRequest(ctx context.Context) *http.Request {
}
return req
}
func WorkspaceClient(ctx context.Context) *databricks.WorkspaceClient {
w, ok := ctx.Value(&workspaceClient).(*databricks.WorkspaceClient)
if !ok {
panic("cannot get *databricks.WorkspaceClient. Please report it as a bug")
}
return w
}
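
The accessor removed here is not deleted outright; the same context-pointer-key pattern moves into the libs/command package (see the call sites switched to command.WorkspaceClient throughout this commit). A minimal sketch of that pattern, mirroring the removed code rather than the actual libs/command source:

package command

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
)

// A package-private variable whose address serves as a unique context key,
// so other packages cannot collide with or forge the stored value.
var workspaceClient int

func SetWorkspaceClient(ctx context.Context, w *databricks.WorkspaceClient) context.Context {
	return context.WithValue(ctx, &workspaceClient, w)
}

func WorkspaceClient(ctx context.Context) *databricks.WorkspaceClient {
	w, ok := ctx.Value(&workspaceClient).(*databricks.WorkspaceClient)
	if !ok {
		panic("cannot get *databricks.WorkspaceClient. Please report it as a bug")
	}
	return w
}

Using the address of a package-private variable as the context key guarantees the key is unique to that package, which is why the getter can safely panic when the value is missing.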

View File

@ -264,7 +264,7 @@ func TestMustAnyClientCanCreateWorkspaceClient(t *testing.T) {
require.False(t, isAccount)
require.NoError(t, err)
w := WorkspaceClient(cmd.Context())
w := command.WorkspaceClient(cmd.Context())
require.NotNil(t, w)
}

View File

@ -213,7 +213,7 @@ func TestTargetFlagFull(t *testing.T) {
cmd.SetArgs([]string{"version", "--target", "development"})
ctx := context.Background()
err := cmd.ExecuteContext(ctx)
err := Execute(ctx, cmd)
assert.NoError(t, err)
assert.Equal(t, "development", getTarget(cmd))
@ -225,7 +225,7 @@ func TestTargetFlagShort(t *testing.T) {
cmd.SetArgs([]string{"version", "-t", "production"})
ctx := context.Background()
err := cmd.ExecuteContext(ctx)
err := Execute(ctx, cmd)
assert.NoError(t, err)
assert.Equal(t, "production", getTarget(cmd))
@ -239,7 +239,7 @@ func TestTargetEnvironmentFlag(t *testing.T) {
cmd.SetArgs([]string{"version", "--environment", "development"})
ctx := context.Background()
err := cmd.ExecuteContext(ctx)
err := Execute(ctx, cmd)
assert.NoError(t, err)
assert.Equal(t, "development", getTarget(cmd))

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/internal/build"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/dbr"
"github.com/databricks/cli/libs/log"
"github.com/spf13/cobra"
@ -124,6 +125,9 @@ Stack Trace:
%s`, version, r, string(trace))
}()
// Set a command execution ID value in the context
ctx = command.GenerateExecId(ctx)
// Run the command
cmd, err = cmd.ExecuteContextC(ctx)
if err != nil && !errors.Is(err, ErrAlreadyPrinted) {

View File

@ -3,12 +3,12 @@ package root
import (
"context"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/useragent"
"github.com/google/uuid"
)
func withCommandExecIdInUserAgent(ctx context.Context) context.Context {
// A UUID that will allow us to correlate multiple API requests made by
// the same CLI invocation.
return useragent.InContext(ctx, "cmd-exec-id", uuid.New().String())
return useragent.InContext(ctx, "cmd-exec-id", command.ExecId(ctx))
}
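
Only the call sites of command.GenerateExecId and command.ExecId appear in this diff. A plausible shape for those helpers, assuming they keep a UUID under a private context key; this is a sketch, not the actual libs/command implementation:

package command

import (
	"context"

	"github.com/google/uuid"
)

var execIdKey int

// GenerateExecId stores a fresh UUID in the context; call it once per CLI invocation.
func GenerateExecId(ctx context.Context) context.Context {
	return context.WithValue(ctx, &execIdKey, uuid.New().String())
}

// ExecId returns the stored execution ID, or an empty string if none was generated.
func ExecId(ctx context.Context) string {
	id, ok := ctx.Value(&execIdKey).(string)
	if !ok {
		return ""
	}
	return id
}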

View File

@ -2,25 +2,18 @@ package root
import (
"context"
"regexp"
"testing"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/useragent"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestWithCommandExecIdInUserAgent(t *testing.T) {
ctx := withCommandExecIdInUserAgent(context.Background())
ctx := command.GenerateExecId(context.Background())
ctx = withCommandExecIdInUserAgent(ctx)
// Check that the command exec ID is in the user agent string.
// user agent should contain cmd-exec-id/<UUID>
ua := useragent.FromContext(ctx)
re := regexp.MustCompile(`cmd-exec-id/([a-f0-9-]+)`)
matches := re.FindAllStringSubmatch(ua, -1)
// Assert that we have exactly one match and that it's a valid UUID.
require.Len(t, matches, 1)
_, err := uuid.Parse(matches[0][1])
assert.NoError(t, err)
assert.Regexp(t, `cmd-exec-id/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`, ua)
}

View File

@ -12,6 +12,7 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/libs/git"
"github.com/databricks/cli/libs/log"
@ -65,7 +66,7 @@ func (f *syncFlags) syncOptionsFromArgs(cmd *cobra.Command, args []string) (*syn
}
ctx := cmd.Context()
client := root.WorkspaceClient(ctx)
client := command.WorkspaceClient(ctx)
localRoot := vfs.MustNew(args[0])
info, err := git.FetchRepositoryInfo(ctx, localRoot.Native(), client)
@ -186,7 +187,7 @@ func New() *cobra.Command {
case 0:
return nil, cobra.ShellCompDirectiveFilterDirs
case 1:
wsc := root.WorkspaceClient(cmd.Context())
wsc := command.WorkspaceClient(cmd.Context())
return completeRemotePath(cmd.Context(), wsc, toComplete)
default:
return nil, cobra.ShellCompDirectiveNoFileComp

View File

@ -8,7 +8,7 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/vfs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -58,7 +58,7 @@ func TestSyncOptionsFromArgs(t *testing.T) {
f := syncFlags{}
cmd := New()
cmd.SetContext(root.SetWorkspaceClient(context.Background(), nil))
cmd.SetContext(command.SetWorkspaceClient(context.Background(), nil))
opts, err := f.syncOptionsFromArgs(cmd, []string{local, remote})
require.NoError(t, err)
assert.Equal(t, local, opts.LocalRoot.Native())

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/iam"
"github.com/spf13/cobra"
@ -70,7 +71,7 @@ func newCheckPolicy() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := checkPolicyJson.Unmarshal(&checkPolicyReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -73,7 +74,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AibiDashboardEmbeddingAccessPolicy().Delete(ctx, deleteReq)
if err != nil {
@ -130,7 +131,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AibiDashboardEmbeddingAccessPolicy().Get(ctx, getReq)
if err != nil {
@ -180,7 +181,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -73,7 +74,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AibiDashboardEmbeddingApprovedDomains().Delete(ctx, deleteReq)
if err != nil {
@ -128,7 +129,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AibiDashboardEmbeddingApprovedDomains().Get(ctx, getReq)
if err != nil {
@ -180,7 +181,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
@ -90,7 +91,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -160,7 +161,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -232,7 +233,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -299,7 +300,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.AlertsLegacy.List(ctx)
if err != nil {
return err
@ -360,7 +361,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
@ -82,7 +83,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -145,7 +146,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -212,7 +213,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -289,7 +290,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Alerts.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -364,7 +365,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/spf13/cobra"
@ -114,7 +115,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq.App)
@ -205,7 +206,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Name = args[0]
@ -277,7 +278,7 @@ func newDeploy() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deployJson.Unmarshal(&deployReq.AppDeployment)
@ -366,7 +367,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Name = args[0]
@ -426,7 +427,7 @@ func newGetDeployment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getDeploymentReq.AppName = args[0]
getDeploymentReq.DeploymentId = args[1]
@ -485,7 +486,7 @@ func newGetPermissionLevels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPermissionLevelsReq.AppName = args[0]
@ -544,7 +545,7 @@ func newGetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPermissionsReq.AppName = args[0]
@ -602,7 +603,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Apps.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -658,7 +659,7 @@ func newListDeployments() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listDeploymentsReq.AppName = args[0]
@ -719,7 +720,7 @@ func newSetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
@ -794,7 +795,7 @@ func newStart() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
startReq.Name = args[0]
@ -876,7 +877,7 @@ func newStop() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
stopReq.Name = args[0]
@ -965,7 +966,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq.App)
@ -1040,7 +1041,7 @@ func newUpdatePermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -77,7 +78,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
_, err = fmt.Sscan(args[0], &getReq.ArtifactType)
if err != nil {
@ -142,7 +143,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -70,7 +71,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AutomaticClusterUpdate().Get(ctx, getReq)
if err != nil {
@ -124,7 +125,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -102,7 +103,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -177,7 +178,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Name = args[0]
@ -239,7 +240,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Name = args[0]
@ -302,7 +303,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Catalogs.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -367,7 +368,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/cleanrooms"
"github.com/spf13/cobra"
@ -96,7 +97,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq.Asset)
@ -169,7 +170,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.CleanRoomName = args[0]
_, err = fmt.Sscan(args[1], &deleteReq.AssetType)
@ -235,7 +236,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.CleanRoomName = args[0]
_, err = fmt.Sscan(args[1], &getReq.AssetType)
@ -298,7 +299,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listReq.CleanRoomName = args[0]
@ -376,7 +377,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq.Asset)

View File

@ -5,6 +5,7 @@ package clean_room_task_runs
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/service/cleanrooms"
"github.com/spf13/cobra"
)
@ -74,7 +75,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listReq.CleanRoomName = args[0]

View File

@ -5,6 +5,7 @@ package clean_rooms
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/cleanrooms"
"github.com/spf13/cobra"
@ -92,7 +93,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq.CleanRoom)
@ -166,7 +167,7 @@ func newCreateOutputCatalog() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createOutputCatalogJson.Unmarshal(&createOutputCatalogReq.OutputCatalog)
@ -239,7 +240,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Name = args[0]
@ -294,7 +295,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Name = args[0]
@ -353,7 +354,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.CleanRooms.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -413,7 +414,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/spf13/cobra"
@ -110,7 +111,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -188,7 +189,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteJson.Unmarshal(&deleteReq)
@ -293,7 +294,7 @@ func newEdit() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := editJson.Unmarshal(&editReq)
@ -376,7 +377,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -446,7 +447,7 @@ func newGetPermissionLevels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -517,7 +518,7 @@ func newGetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -592,7 +593,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ClusterPolicies.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -646,7 +647,7 @@ func newSetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
@ -733,7 +734,7 @@ func newUpdatePermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq)

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/spf13/cobra"
@ -131,7 +132,7 @@ func newChangeOwner() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := changeOwnerJson.Unmarshal(&changeOwnerReq)
@ -277,7 +278,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -377,7 +378,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteJson.Unmarshal(&deleteReq)
@ -546,7 +547,7 @@ func newEdit() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := editJson.Unmarshal(&editReq)
@ -650,7 +651,7 @@ func newEvents() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := eventsJson.Unmarshal(&eventsReq)
@ -736,7 +737,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -806,7 +807,7 @@ func newGetPermissionLevels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -877,7 +878,7 @@ func newGetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -956,7 +957,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Clusters.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -997,7 +998,7 @@ func newListNodeTypes() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Clusters.ListNodeTypes(ctx)
if err != nil {
return err
@ -1040,7 +1041,7 @@ func newListZones() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Clusters.ListZones(ctx)
if err != nil {
return err
@ -1108,7 +1109,7 @@ func newPermanentDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := permanentDeleteJson.Unmarshal(&permanentDeleteReq)
@ -1206,7 +1207,7 @@ func newPin() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := pinJson.Unmarshal(&pinReq)
@ -1311,7 +1312,7 @@ func newResize() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := resizeJson.Unmarshal(&resizeReq)
@ -1427,7 +1428,7 @@ func newRestart() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := restartJson.Unmarshal(&restartReq)
@ -1528,7 +1529,7 @@ func newSetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
@ -1603,7 +1604,7 @@ func newSparkVersions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Clusters.SparkVersions(ctx)
if err != nil {
return err
@ -1678,7 +1679,7 @@ func newStart() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := startJson.Unmarshal(&startReq)
@ -1788,7 +1789,7 @@ func newUnpin() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := unpinJson.Unmarshal(&unpinReq)
@ -1906,7 +1907,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)
@ -1993,7 +1994,7 @@ func newUpdatePermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -73,7 +74,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.ComplianceSecurityProfile().Get(ctx, getReq)
if err != nil {
@ -127,7 +128,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -89,7 +90,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -155,7 +156,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -225,7 +226,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -300,7 +301,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Connections.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -358,7 +359,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -5,6 +5,7 @@ package consumer_fulfillments
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/service/marketplace"
"github.com/spf13/cobra"
)
@ -71,7 +72,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.ListingId = args[0]
@ -130,7 +131,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listReq.ListingId = args[0]

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/marketplace"
"github.com/spf13/cobra"
@ -83,7 +84,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -150,7 +151,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.ListingId = args[0]
deleteReq.InstallationId = args[1]
@ -209,7 +210,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ConsumerInstallations.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -262,7 +263,7 @@ func newListListingInstallations() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listListingInstallationsReq.ListingId = args[0]
@ -322,7 +323,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/service/marketplace"
"github.com/spf13/cobra"
)
@ -76,7 +77,7 @@ func newBatchGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.ConsumerListings.BatchGet(ctx, batchGetReq)
if err != nil {
@ -125,7 +126,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -208,7 +209,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ConsumerListings.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -266,7 +267,7 @@ func newSearch() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/marketplace"
"github.com/spf13/cobra"
@ -82,7 +83,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -152,7 +153,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.ListingId = args[0]
@ -210,7 +211,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ConsumerPersonalizationRequests.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/service/marketplace"
"github.com/spf13/cobra"
)
@ -74,7 +75,7 @@ func newBatchGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.ConsumerProviders.BatchGet(ctx, batchGetReq)
if err != nil {
@ -123,7 +124,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -200,7 +201,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ConsumerProviders.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -72,7 +73,7 @@ func newExchangeToken() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := exchangeTokenJson.Unmarshal(&exchangeTokenReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -110,7 +111,7 @@ func newCreateCredential() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createCredentialJson.Unmarshal(&createCredentialReq)
@ -185,7 +186,7 @@ func newDeleteCredential() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteCredentialReq.NameArg = args[0]
@ -257,7 +258,7 @@ func newGenerateTemporaryServiceCredential() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := generateTemporaryServiceCredentialJson.Unmarshal(&generateTemporaryServiceCredentialReq)
@ -331,7 +332,7 @@ func newGetCredential() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getCredentialReq.NameArg = args[0]
@ -395,7 +396,7 @@ func newListCredentials() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Credentials.ListCredentials(ctx, listCredentialsReq)
return cmdio.RenderIterator(ctx, response)
@ -466,7 +467,7 @@ func newUpdateCredential() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateCredentialJson.Unmarshal(&updateCredentialReq)
@ -558,7 +559,7 @@ func newValidateCredential() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := validateCredentialJson.Unmarshal(&validateCredentialReq)

View File

@ -5,6 +5,7 @@ package current_user
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/spf13/cobra"
)
@ -57,7 +58,7 @@ func newMe() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.CurrentUser.Me(ctx)
if err != nil {
return err

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
@ -72,7 +73,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -141,7 +142,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Id = args[0]
@ -199,7 +200,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
@ -75,7 +76,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -139,7 +140,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -207,7 +208,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -287,7 +288,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Dashboards.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -332,7 +333,7 @@ func newRestore() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -408,7 +409,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -5,6 +5,7 @@ package data_sources
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/spf13/cobra"
)
@ -77,7 +78,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.DataSources.List(ctx)
if err != nil {
return err

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -85,7 +86,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.DefaultNamespace().Delete(ctx, deleteReq)
if err != nil {
@ -140,7 +141,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.DefaultNamespace().Get(ctx, getReq)
if err != nil {
@ -196,7 +197,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -79,7 +80,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.DisableLegacyAccess().Delete(ctx, deleteReq)
if err != nil {
@ -134,7 +135,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.DisableLegacyAccess().Get(ctx, getReq)
if err != nil {
@ -184,7 +185,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -76,7 +77,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.DisableLegacyDbfs().Delete(ctx, deleteReq)
if err != nil {
@ -131,7 +132,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.DisableLegacyDbfs().Get(ctx, getReq)
if err != nil {
@ -181,7 +182,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -75,7 +76,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.EnhancedSecurityMonitoring().Get(ctx, getReq)
if err != nil {
@ -129,7 +130,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/ml"
"github.com/spf13/cobra"
@ -127,7 +128,7 @@ func newCreateExperiment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createExperimentJson.Unmarshal(&createExperimentReq)
@ -207,7 +208,7 @@ func newCreateRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createRunJson.Unmarshal(&createRunReq)
@ -287,7 +288,7 @@ func newDeleteExperiment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteExperimentJson.Unmarshal(&deleteExperimentReq)
@ -368,7 +369,7 @@ func newDeleteRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteRunJson.Unmarshal(&deleteRunReq)
@ -456,7 +457,7 @@ func newDeleteRuns() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteRunsJson.Unmarshal(&deleteRunsReq)
@ -545,7 +546,7 @@ func newDeleteTag() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteTagJson.Unmarshal(&deleteTagReq)
@ -628,7 +629,7 @@ func newGetByName() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getByNameReq.ExperimentName = args[0]
@ -686,7 +687,7 @@ func newGetExperiment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getExperimentReq.ExperimentId = args[0]
@ -749,7 +750,7 @@ func newGetHistory() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getHistoryReq.MetricKey = args[0]
@ -804,7 +805,7 @@ func newGetPermissionLevels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPermissionLevelsReq.ExperimentId = args[0]
@ -863,7 +864,7 @@ func newGetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPermissionsReq.ExperimentId = args[0]
@ -928,7 +929,7 @@ func newGetRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getRunReq.RunId = args[0]
@ -993,7 +994,7 @@ func newListArtifacts() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Experiments.ListArtifacts(ctx, listArtifactsReq)
return cmdio.RenderIterator(ctx, response)
@ -1047,7 +1048,7 @@ func newListExperiments() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Experiments.ListExperiments(ctx, listExperimentsReq)
return cmdio.RenderIterator(ctx, response)
@ -1146,7 +1147,7 @@ func newLogBatch() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := logBatchJson.Unmarshal(&logBatchReq)
@ -1229,7 +1230,7 @@ func newLogInputs() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := logInputsJson.Unmarshal(&logInputsReq)
@ -1318,7 +1319,7 @@ func newLogMetric() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := logMetricJson.Unmarshal(&logMetricReq)
@ -1405,7 +1406,7 @@ func newLogModel() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := logModelJson.Unmarshal(&logModelReq)
@ -1490,7 +1491,7 @@ func newLogParam() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := logParamJson.Unmarshal(&logParamReq)
@ -1579,7 +1580,7 @@ func newRestoreExperiment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := restoreExperimentJson.Unmarshal(&restoreExperimentReq)
@ -1664,7 +1665,7 @@ func newRestoreRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := restoreRunJson.Unmarshal(&restoreRunReq)
@ -1752,7 +1753,7 @@ func newRestoreRuns() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := restoreRunsJson.Unmarshal(&restoreRunsReq)
@ -1835,7 +1836,7 @@ func newSearchExperiments() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := searchExperimentsJson.Unmarshal(&searchExperimentsReq)
@ -1909,7 +1910,7 @@ func newSearchRuns() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := searchRunsJson.Unmarshal(&searchRunsReq)
@ -1987,7 +1988,7 @@ func newSetExperimentTag() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setExperimentTagJson.Unmarshal(&setExperimentTagReq)
@ -2071,7 +2072,7 @@ func newSetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
@ -2156,7 +2157,7 @@ func newSetTag() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setTagJson.Unmarshal(&setTagReq)
@ -2242,7 +2243,7 @@ func newUpdateExperiment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateExperimentJson.Unmarshal(&updateExperimentReq)
@ -2319,7 +2320,7 @@ func newUpdatePermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq)
@ -2394,7 +2395,7 @@ func newUpdateRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateRunJson.Unmarshal(&updateRunReq)
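The list-style commands in this file delegate pagination to cmdio; a brief sketch of that path, assembled from the hunks above and assuming listExperimentsReq as shown there:

    cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
        ctx := cmd.Context()
        w := command.WorkspaceClient(ctx)
        // Paginated endpoints return an iterator that cmdio renders page by page.
        response := w.Experiments.ListExperiments(ctx, listExperimentsReq)
        return cmdio.RenderIterator(ctx, response)
    }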

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -109,7 +110,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -190,7 +191,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Name = args[0]
@ -252,7 +253,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Name = args[0]
@ -314,7 +315,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ExternalLocations.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -384,7 +385,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -82,7 +83,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -156,7 +157,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -235,7 +236,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -321,7 +322,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listReq.CatalogName = args[0]
listReq.SchemaName = args[1]
@ -384,7 +385,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/spf13/cobra"
@ -105,7 +106,7 @@ func newCreateMessage() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createMessageJson.Unmarshal(&createMessageReq)
@ -194,7 +195,7 @@ func newExecuteMessageQuery() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
executeMessageQueryReq.SpaceId = args[0]
executeMessageQueryReq.ConversationId = args[1]
@ -258,7 +259,7 @@ func newGetMessage() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getMessageReq.SpaceId = args[0]
getMessageReq.ConversationId = args[1]
@ -322,7 +323,7 @@ func newGetMessageQueryResult() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getMessageQueryResultReq.SpaceId = args[0]
getMessageQueryResultReq.ConversationId = args[1]
@ -387,7 +388,7 @@ func newGetMessageQueryResultByAttachment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getMessageQueryResultByAttachmentReq.SpaceId = args[0]
getMessageQueryResultByAttachmentReq.ConversationId = args[1]
@ -448,7 +449,7 @@ func newGetSpace() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getSpaceReq.SpaceId = args[0]
@ -522,7 +523,7 @@ func newStartConversation() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := startConversationJson.Unmarshal(&startConversationReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/spf13/cobra"
@ -100,7 +101,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -167,7 +168,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -240,7 +241,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -306,7 +307,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.GitCredentials.List(ctx)
return cmdio.RenderIterator(ctx, response)
}
@ -374,7 +375,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/spf13/cobra"
@ -98,7 +99,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -168,7 +169,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -238,7 +239,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -303,7 +304,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.GlobalInitScripts.List(ctx)
return cmdio.RenderIterator(ctx, response)
}
@ -370,7 +371,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -88,7 +89,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
_, err = fmt.Sscan(args[0], &getReq.SecurableType)
if err != nil {
@ -153,7 +154,7 @@ func newGetEffective() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
_, err = fmt.Sscan(args[0], &getEffectiveReq.SecurableType)
if err != nil {
@ -220,7 +221,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)
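Non-string positional arguments in this file are parsed with fmt.Sscan, as the hunks above show; a compact sketch with getReq assumed from the Get hunk and the error simply propagated:

    cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
        ctx := cmd.Context()
        w := command.WorkspaceClient(ctx)
        // Enum-typed positional arguments are scanned into the request struct.
        _, err = fmt.Sscan(args[0], &getReq.SecurableType)
        if err != nil {
            return err
        }
        _ = w // the subsequent service call is elided in this sketch
        return nil
    }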

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/iam"
"github.com/spf13/cobra"
@ -94,7 +95,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -158,7 +159,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -228,7 +229,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -308,7 +309,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Groups.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -361,7 +362,7 @@ func newPatch() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := patchJson.Unmarshal(&patchReq)
@ -455,7 +456,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/spf13/cobra"
@ -125,7 +126,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -209,7 +210,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteJson.Unmarshal(&deleteReq)
@ -318,7 +319,7 @@ func newEdit() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := editJson.Unmarshal(&editReq)
@ -391,7 +392,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -461,7 +462,7 @@ func newGetPermissionLevels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -532,7 +533,7 @@ func newGetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -594,7 +595,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.InstancePools.List(ctx)
return cmdio.RenderIterator(ctx, response)
}
@ -647,7 +648,7 @@ func newSetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
@ -734,7 +735,7 @@ func newUpdatePermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/spf13/cobra"
@ -96,7 +97,7 @@ func newAdd() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := addJson.Unmarshal(&addReq)
@ -195,7 +196,7 @@ func newEdit() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := editJson.Unmarshal(&editReq)
@ -256,7 +257,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.InstanceProfiles.List(ctx)
return cmdio.RenderIterator(ctx, response)
}
@ -320,7 +321,7 @@ func newRemove() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := removeJson.Unmarshal(&removeReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -130,7 +131,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -203,7 +204,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -273,7 +274,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -335,7 +336,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.IpAccessLists.List(ctx)
return cmdio.RenderIterator(ctx, response)
}
@ -417,7 +418,7 @@ func newReplace() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := replaceJson.Unmarshal(&replaceReq)
@ -519,7 +520,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/spf13/cobra"
@ -113,7 +114,7 @@ func newCancelAllRuns() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := cancelAllRunsJson.Unmarshal(&cancelAllRunsReq)
@ -196,7 +197,7 @@ func newCancelRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := cancelRunJson.Unmarshal(&cancelRunReq)
@ -300,7 +301,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -379,7 +380,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteJson.Unmarshal(&deleteReq)
@ -478,7 +479,7 @@ func newDeleteRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deleteRunJson.Unmarshal(&deleteRunReq)
@ -566,7 +567,7 @@ func newExportRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -648,7 +649,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -721,7 +722,7 @@ func newGetPermissionLevels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -792,7 +793,7 @@ func newGetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -878,7 +879,7 @@ func newGetRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -960,7 +961,7 @@ func newGetRunOutput() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -1041,7 +1042,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Jobs.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -1102,7 +1103,7 @@ func newListRuns() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Jobs.ListRuns(ctx, listRunsReq)
return cmdio.RenderIterator(ctx, response)
@ -1184,7 +1185,7 @@ func newRepairRun() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := repairRunJson.Unmarshal(&repairRunReq)
@ -1289,7 +1290,7 @@ func newReset() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := resetJson.Unmarshal(&resetReq)
@ -1387,7 +1388,7 @@ func newRunNow() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := runNowJson.Unmarshal(&runNowReq)
@ -1498,7 +1499,7 @@ func newSetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
@ -1607,7 +1608,7 @@ func newSubmit() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := submitJson.Unmarshal(&submitReq)
@ -1707,7 +1708,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)
@ -1798,7 +1799,7 @@ func newUpdatePermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq)

View File

@ -4,6 +4,7 @@ package lakeview_embedded
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/spf13/cobra"
)
@ -72,7 +73,7 @@ func newGetPublishedDashboardEmbedded() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPublishedDashboardEmbeddedReq.DashboardId = args[0]
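The import-side change is identical across files; as a sketch, the resulting import block for this package, reconstructed from the hunk above (additional imports outside the shown context may exist):

    import (
        "github.com/databricks/cli/cmd/root"
        "github.com/databricks/cli/libs/command"
        "github.com/databricks/databricks-sdk-go/service/dashboards"
        "github.com/spf13/cobra"
    )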

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/spf13/cobra"
@ -96,7 +97,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq.Dashboard)
@ -170,7 +171,7 @@ func newCreateSchedule() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createScheduleJson.Unmarshal(&createScheduleReq.Schedule)
@ -242,7 +243,7 @@ func newCreateSubscription() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createSubscriptionJson.Unmarshal(&createSubscriptionReq.Subscription)
@ -312,7 +313,7 @@ func newDeleteSchedule() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteScheduleReq.DashboardId = args[0]
deleteScheduleReq.ScheduleId = args[1]
@ -371,7 +372,7 @@ func newDeleteSubscription() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteSubscriptionReq.DashboardId = args[0]
deleteSubscriptionReq.ScheduleId = args[1]
@ -431,7 +432,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.DashboardId = args[0]
@ -489,7 +490,7 @@ func newGetPublished() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPublishedReq.DashboardId = args[0]
@ -546,7 +547,7 @@ func newGetSchedule() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getScheduleReq.DashboardId = args[0]
getScheduleReq.ScheduleId = args[1]
@ -605,7 +606,7 @@ func newGetSubscription() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getSubscriptionReq.DashboardId = args[0]
getSubscriptionReq.ScheduleId = args[1]
@ -664,7 +665,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Lakeview.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -717,7 +718,7 @@ func newListSchedules() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listSchedulesReq.DashboardId = args[0]
@ -773,7 +774,7 @@ func newListSubscriptions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listSubscriptionsReq.DashboardId = args[0]
listSubscriptionsReq.ScheduleId = args[1]
@ -842,7 +843,7 @@ func newMigrate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := migrateJson.Unmarshal(&migrateReq)
@ -919,7 +920,7 @@ func newPublish() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := publishJson.Unmarshal(&publishReq)
@ -989,7 +990,7 @@ func newTrash() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
trashReq.DashboardId = args[0]
@ -1047,7 +1048,7 @@ func newUnpublish() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
unpublishReq.DashboardId = args[0]
@ -1112,7 +1113,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq.Dashboard)
@ -1188,7 +1189,7 @@ func newUpdateSchedule() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateScheduleJson.Unmarshal(&updateScheduleReq.Schedule)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/spf13/cobra"
@ -79,7 +80,7 @@ func newAllClusterStatuses() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Libraries.AllClusterStatuses(ctx)
return cmdio.RenderIterator(ctx, response)
}
@ -137,7 +138,7 @@ func newClusterStatus() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
clusterStatusReq.ClusterId = args[0]
@ -187,7 +188,7 @@ func newInstall() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := installJson.Unmarshal(&installReq)
@ -254,7 +255,7 @@ func newUninstall() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := uninstallJson.Unmarshal(&uninstallReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -109,7 +110,7 @@ func newAssign() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := assignJson.Unmarshal(&assignReq)
@ -204,7 +205,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -263,7 +264,7 @@ func newCurrent() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Metastores.Current(ctx)
if err != nil {
return err
@ -315,7 +316,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -386,7 +387,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -450,7 +451,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Metastores.List(ctx)
return cmdio.RenderIterator(ctx, response)
}
@ -490,7 +491,7 @@ func newSummary() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Metastores.Summary(ctx)
if err != nil {
return err
@ -546,7 +547,7 @@ func newUnassign() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
_, err = fmt.Sscan(args[0], &unassignReq.WorkspaceId)
if err != nil {
@ -615,7 +616,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)
@ -705,7 +706,7 @@ func newUpdateAssignment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateAssignmentJson.Unmarshal(&updateAssignmentReq)

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/ml"
"github.com/spf13/cobra"
@ -138,7 +139,7 @@ func newApproveTransitionRequest() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := approveTransitionRequestJson.Unmarshal(&approveTransitionRequestReq)
@ -238,7 +239,7 @@ func newCreateComment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createCommentJson.Unmarshal(&createCommentReq)
@ -331,7 +332,7 @@ func newCreateModel() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createModelJson.Unmarshal(&createModelReq)
@ -418,7 +419,7 @@ func newCreateModelVersion() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createModelVersionJson.Unmarshal(&createModelVersionReq)
@ -514,7 +515,7 @@ func newCreateTransitionRequest() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createTransitionRequestJson.Unmarshal(&createTransitionRequestReq)
@ -597,7 +598,7 @@ func newCreateWebhook() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createWebhookJson.Unmarshal(&createWebhookReq)
@ -665,7 +666,7 @@ func newDeleteComment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteCommentReq.Id = args[0]
@ -723,7 +724,7 @@ func newDeleteModel() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteModelReq.Name = args[0]
@ -783,7 +784,7 @@ func newDeleteModelTag() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteModelTagReq.Name = args[0]
deleteModelTagReq.Key = args[1]
@ -843,7 +844,7 @@ func newDeleteModelVersion() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteModelVersionReq.Name = args[0]
deleteModelVersionReq.Version = args[1]
@ -905,7 +906,7 @@ func newDeleteModelVersionTag() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteModelVersionTagReq.Name = args[0]
deleteModelVersionTagReq.Version = args[1]
@ -980,7 +981,7 @@ func newDeleteTransitionRequest() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteTransitionRequestReq.Name = args[0]
deleteTransitionRequestReq.Version = args[1]
@ -1045,7 +1046,7 @@ func newDeleteWebhook() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
err = w.ModelRegistry.DeleteWebhook(ctx, deleteWebhookReq)
if err != nil {
@ -1112,7 +1113,7 @@ func newGetLatestVersions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := getLatestVersionsJson.Unmarshal(&getLatestVersionsReq)
@ -1185,7 +1186,7 @@ func newGetModel() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getModelReq.Name = args[0]
@ -1244,7 +1245,7 @@ func newGetModelVersion() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getModelVersionReq.Name = args[0]
getModelVersionReq.Version = args[1]
@ -1304,7 +1305,7 @@ func newGetModelVersionDownloadUri() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getModelVersionDownloadUriReq.Name = args[0]
getModelVersionDownloadUriReq.Version = args[1]
@ -1363,7 +1364,7 @@ func newGetPermissionLevels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPermissionLevelsReq.RegisteredModelId = args[0]
@ -1422,7 +1423,7 @@ func newGetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPermissionsReq.RegisteredModelId = args[0]
@ -1481,7 +1482,7 @@ func newListModels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ModelRegistry.ListModels(ctx, listModelsReq)
return cmdio.RenderIterator(ctx, response)
@ -1535,7 +1536,7 @@ func newListTransitionRequests() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listTransitionRequestsReq.Name = args[0]
listTransitionRequestsReq.Version = args[1]
@ -1594,7 +1595,7 @@ func newListWebhooks() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ModelRegistry.ListWebhooks(ctx, listWebhooksReq)
return cmdio.RenderIterator(ctx, response)
@ -1668,7 +1669,7 @@ func newRejectTransitionRequest() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := rejectTransitionRequestJson.Unmarshal(&rejectTransitionRequestReq)
@@ -1760,7 +1761,7 @@ func newRenameModel() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := renameModelJson.Unmarshal(&renameModelReq)
@@ -1834,7 +1835,7 @@ func newSearchModelVersions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ModelRegistry.SearchModelVersions(ctx, searchModelVersionsReq)
return cmdio.RenderIterator(ctx, response)
@@ -1889,7 +1890,7 @@ func newSearchModels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.ModelRegistry.SearchModels(ctx, searchModelsReq)
return cmdio.RenderIterator(ctx, response)
@@ -1958,7 +1959,7 @@ func newSetModelTag() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setModelTagJson.Unmarshal(&setModelTagReq)
@@ -2053,7 +2054,7 @@ func newSetModelVersionTag() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setModelVersionTagJson.Unmarshal(&setModelVersionTagReq)
@@ -2140,7 +2141,7 @@ func newSetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
@@ -2236,7 +2237,7 @@ func newTestRegistryWebhook() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := testRegistryWebhookJson.Unmarshal(&testRegistryWebhookReq)
@@ -2335,7 +2336,7 @@ func newTransitionStage() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := transitionStageJson.Unmarshal(&transitionStageReq)
@@ -2432,7 +2433,7 @@ func newUpdateComment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateCommentJson.Unmarshal(&updateCommentReq)
@@ -2518,7 +2519,7 @@ func newUpdateModel() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateModelJson.Unmarshal(&updateModelReq)
@@ -2602,7 +2603,7 @@ func newUpdateModelVersion() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateModelVersionJson.Unmarshal(&updateModelVersionReq)
@@ -2682,7 +2683,7 @@ func newUpdatePermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq)
@@ -2769,7 +2770,7 @@ func newUpdateWebhook() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateWebhookJson.Unmarshal(&updateWebhookReq)

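For reference, a minimal sketch of the handler shape these hunks converge on: root.MustWorkspaceClient still runs as PreRunE, and the RunE body now resolves the client through libs/command instead of cmd/root. The request variable is declared earlier in the generated constructor, and the trailing error handling is abbreviated from the truncated hunks above, not copied verbatim from the generated file.

cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
	ctx := cmd.Context()
	// Previously: w := root.WorkspaceClient(ctx)
	w := command.WorkspaceClient(ctx)
	err = w.ModelRegistry.DeleteWebhook(ctx, deleteWebhookReq)
	if err != nil {
		return err
	}
	return nil
}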
View File

@@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@@ -90,7 +91,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.FullName = args[0]
_, err = fmt.Sscan(args[1], &deleteReq.Version)
@@ -161,7 +162,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.FullName = args[0]
_, err = fmt.Sscan(args[1], &getReq.Version)
@@ -231,7 +232,7 @@ func newGetByAlias() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getByAliasReq.FullName = args[0]
getByAliasReq.Alias = args[1]
@@ -307,7 +308,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listReq.FullName = args[0]
@@ -374,7 +375,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

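The commands in this file parse positional arguments into the typed request before issuing the call; a hedged sketch of that pattern under the new client lookup. The error message and the trailing service call are assumptions for illustration, since the hunks above are truncated before them.

cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
	ctx := cmd.Context()
	w := command.WorkspaceClient(ctx)

	deleteReq.FullName = args[0]
	// Numeric positional arguments are scanned into the typed request field.
	_, err = fmt.Sscan(args[1], &deleteReq.Version)
	if err != nil {
		return fmt.Errorf("invalid VERSION: %s", args[1]) // wording assumed, not shown in the hunk
	}
	// The service and method names below are inferred from context; the hunk
	// is cut off before the actual call.
	return w.ModelVersions.Delete(ctx, deleteReq)
}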
View File

@@ -5,6 +5,7 @@ package notification_destinations
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@@ -81,7 +82,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@@ -147,7 +148,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Id = args[0]
@@ -202,7 +203,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Id = args[0]
@@ -260,7 +261,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.NotificationDestinations.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@@ -319,7 +320,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

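List-style commands keep the same shape apart from the client lookup: the SDK call returns a paginated iterator and cmdio renders it. A sketch based on the List hunk above; only the closing brace is filled in beyond what the hunk shows.

cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
	ctx := cmd.Context()
	w := command.WorkspaceClient(ctx)
	// The iterator is rendered page by page rather than materialized up front.
	response := w.NotificationDestinations.List(ctx, listReq)
	return cmdio.RenderIterator(ctx, response)
}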
View File

@@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@@ -86,7 +87,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq.Table)
@@ -170,7 +171,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Name = args[0]
@@ -228,7 +229,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Name = args[0]

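Commands that accept --json follow the same two-step shape after the change: unmarshal the flag payload into the request, then call the service. A hedged sketch based on the Create hunk of the last file above; the diagnostics handling, the service call, and the rendering are assumptions, since every hunk is truncated right after the Unmarshal line.

cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
	ctx := cmd.Context()
	w := command.WorkspaceClient(ctx)

	if cmd.Flags().Changed("json") {
		// The --json payload populates the nested Table field of the request.
		diags := createJson.Unmarshal(&createReq.Table)
		// Simplified diagnostics handling; the generated code's exact handling
		// is not visible in the hunk above.
		if err := diags.Error(); err != nil {
			return err
		}
	}

	// Service call and rendering assumed for illustration only.
	response, err := w.OnlineTables.Create(ctx, createReq)
	if err != nil {
		return err
	}
	return cmdio.Render(ctx, response)
}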
Some files were not shown because too many files have changed in this diff.