Merge remote-tracking branch 'origin' into bundle-exec

Commit 8e19474f52 by Shreyas Goenka, 2025-03-11 12:50:40 +01:00 (GPG Key ID: 92A07DF49CCB0622)
246 changed files with 9438 additions and 6342 deletions

@@ -1,4 +1,6 @@
{
"mode": "cli_legacy",
"api_changelog": true,
"formatter": "go run golang.org/x/tools/cmd/goimports@latest -w $FILENAMES && go fmt ./...",
"services": {
".codegen/service.go.tmpl": "cmd/{{if .IsAccounts}}account{{else}}workspace{{end}}/{{(.TrimPrefix \"account\").KebabName}}/{{(.TrimPrefix \"account\").KebabName}}.go"

@@ -1,40 +0,0 @@
# Version changelog
## {{.Version}}
CLI:
{{- range .Changes}}
* {{.}}.
{{- end}}
Bundles:
* **FILL THIS IN MANUALLY BY MOVING RELEVANT ITEMS FROM ABOVE LIST**
Internal:
* **FILL THIS IN MANUALLY BY MOVING RELEVANT ITEMS FROM ABOVE LIST**
{{ if .ApiChanges -}}
API Changes:
{{- range .ApiChanges}}{{if or (eq .X "method") (eq .X "service")}}
* {{.Action}} {{template "what" .}}{{if .Extra}} {{.Extra}}{{with .Other}} {{template "what" .}}{{end}}{{end}}.
{{- end}}{{- end}}
OpenAPI commit {{.Sha}} ({{.Changed}})
{{- end }}
{{- if .DependencyUpdates }}
Dependency updates:
{{- range .DependencyUpdates}}
* {{.}}.
{{- end -}}
{{end}}
## {{.PrevVersion}}
{{- define "what" -}}
{{if eq .X "service" -}}
`databricks {{if .Service.IsAccounts}}account {{end -}}{{(.Service.TrimPrefix "account").KebabName}}` command group
{{- else if eq .X "method" -}}
`databricks {{if .Method.Service.IsAccounts}}account {{end -}}{{(.Method.Service.TrimPrefix "account").KebabName}} {{.Method.KebabName}}` command
{{- end}}
{{- end -}}

@@ -4,6 +4,7 @@ package {{(.TrimPrefix "account").SnakeName}}
import (
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/cmd/root"
@@ -240,7 +241,7 @@ func new{{.PascalName}}() *cobra.Command {
cmd.PreRunE = root.Must{{if .Service.IsAccounts}}Account{{else}}Workspace{{end}}Client
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
{{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
{{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := command.WorkspaceClient(ctx){{end}}
{{- if .Request }}
{{ if $canUseJson }}
if cmd.Flags().Changed("json") {

@@ -2,9 +2,11 @@
<!-- Brief summary of your changes that is easy to understand -->
## Why
<!-- Why are these changes needed? Provide the context that the reviewer might be missing.
For example, were there any decisions behind the change that are not reflected in the code itself? -->
## Tests
<!-- How have you tested the changes? -->
<!-- If your PR needs to be included in the release notes for next release,
add a separate entry in NEXT_CHANGELOG.md as part of your PR. -->

@@ -58,7 +58,9 @@ jobs:
python-version: '3.9'
- name: Install uv
uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0
uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 # v5.3.1
with:
version: "0.6.5"
- name: Set go env
run: |
@@ -145,7 +147,10 @@ jobs:
go run main.go bundle schema > schema.json
# Add markdownDescription keyword to ajv
echo "module.exports=function(a){a.addKeyword('markdownDescription')}" >> keywords.js
echo "module.exports = function(a) {
a.addKeyword('markdownDescription');
a.addKeyword('deprecationMessage');
}" >> keywords.js
for file in ./bundle/internal/schema/testdata/pass/*.yml; do
ajv test -s schema.json -d $file --valid -c=./keywords.js
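ajv (v8+) runs in strict mode by default and rejects schema keywords it does not know, so every custom keyword used by the bundle schema must be registered through the `-c` module hook before validation. The expanded `keywords.js` above registers `deprecationMessage` alongside the existing `markdownDescription`, matching the `deprecated`/`deprecationMessage` fields added to the generated JSON schema later in this diff.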

.github/workflows/python_push.yml (new file)

@@ -0,0 +1,61 @@
name: python build
on:
pull_request:
types: [opened, synchronize]
paths:
- experimental/python/**
merge_group:
types: [checks_requested]
paths:
- experimental/python/**
push:
# Always run on push to main. The build cache can only be reused
# if it was saved by a run from the repository's default branch.
# The run result will be identical to that from the merge queue
# because the commit is identical, yet we need to perform it to
# seed the build cache.
branches:
- main
jobs:
python_tests:
name: tests
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
pyVersion: [ '3.10', '3.11', '3.12', '3.13' ]
steps:
- name: Checkout repository and submodules
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install uv
uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 # v5.3.1
with:
python-version: ${{ matrix.pyVersion }}
version: "0.6.5"
- name: Run tests
working-directory: experimental/python
run: make test
python_linters:
name: lint
runs-on: ubuntu-latest
steps:
- name: Checkout repository and submodules
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install uv
uses: astral-sh/setup-uv@f94ec6bedd8674c4426838e6b50417d36b6ab231 # v5.3.1
with:
version: "0.6.5"
checksum: "8fc9895719a1291ecd193cb86f9282ff3649cef797d29eacc74c4f573aab1e2f"
- name: Run lint
working-directory: experimental/python
run: make lint

.github/workflows/tagging.yml (new file)

@@ -0,0 +1,52 @@
# Generated file. DO NOT EDIT.
name: tagging
on:
workflow_dispatch:
# Enable for automatic tagging
#schedule:
# - cron: '0 0 * * TUE'
# Ensure that only a single instance of the workflow is running at a time.
concurrency:
group: "tagging"
jobs:
tag:
environment: "release-is"
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
steps:
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.DECO_SDK_TAGGING_APP_ID }}
private-key: ${{ secrets.DECO_SDK_TAGGING_PRIVATE_KEY }}
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ steps.generate-token.outputs.token }}
#NOTE: email must be the GitHub App email or the commit will not be verified.
- name: Set up Git configuration
run: |
git config user.name "Databricks SDK Release Bot"
git config user.email "DECO-SDK-Tagging[bot]@users.noreply.github.com"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install PyGithub
- name: Run script
env:
GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
GITHUB_REPOSITORY: ${{ github.repository }}
run: |
python tagging.py

.package.json (new file)

@@ -0,0 +1 @@
{}

@@ -1,5 +1,29 @@
# Version changelog
## [Release] Release v0.243.0
CLI:
* Upgrade Go SDK to 0.59.0 ([#2425](https://github.com/databricks/cli/pull/2425)).
Bundles:
* Added a warning when `config` section is used in apps ([#2416](https://github.com/databricks/cli/pull/2416)).
* Switch to use GET workspaces-files/{name} instead of workspace/export for state files to avoid 10MB limit ([#2423](https://github.com/databricks/cli/pull/2423)).
* Use schema field for pipeline in builtin template ([#2347](https://github.com/databricks/cli/pull/2347)).
* Add warning when variable interpolation is used for auth fields ([#2399](https://github.com/databricks/cli/pull/2399)).
* Add warning when include is used in config files other than databricks.yml ([#2389](https://github.com/databricks/cli/pull/2389)).
* Add support for schemas in deployment bind/unbind commands ([#2406](https://github.com/databricks/cli/pull/2406)).
* Do not modify/create .gitignore in bundle root ([#2429](https://github.com/databricks/cli/pull/2429)).
* Raise an error when there are multiple local libraries with the same basename used ([#2382](https://github.com/databricks/cli/pull/2382)).
* Upgrade TF provider to 1.68.0 ([#2426](https://github.com/databricks/cli/pull/2426)).
API Changes:
* Changed `databricks experiments log-inputs` command with new required argument order.
* Added `databricks genie get-space` command.
* Added `databricks providers list-provider-share-assets` command.
* Changed `databricks shares update-permissions` command return type to become non-empty.
OpenAPI commit e5c870006a536121442cfd2441bdc8a5fb76ae1e (2025-03-03)
## [Release] Release v0.242.0
Notable changes:

NEXT_CHANGELOG.md (new file)

@@ -0,0 +1,12 @@
# NEXT CHANGELOG
## Release v0.243.1
### CLI
* Fixed "can't evaluate field Name in type interface{}" for "databricks queries list" command ([#2451](https://github.com/databricks/cli/pull/2451))
### Bundles
### Internal
### API Changes

@@ -223,6 +223,10 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
if !isTruePtr(config.Cloud) && cloudEnv != "" {
t.Skipf("Disabled via Cloud setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
} else {
if isTruePtr(config.RequiresUnityCatalog) && os.Getenv("TEST_METASTORE_ID") == "" {
t.Skipf("Skipping on non-UC workspaces")
}
}
var tmpDir string

acceptance/bin/setmtime.py (new executable file)

@@ -0,0 +1,17 @@
#!/usr/bin/env python3
"""
Cross-platform set mtime with nanosecond precision.
Usage: setmtime.py <timestamp> <filenames>
"""
import sys
import os
import datetime
timestamp = sys.argv[1]
ts, ns = timestamp.split(".")
dt = datetime.datetime.strptime(ts, "%Y-%m-%d %H:%M:%S").replace(tzinfo=datetime.timezone.utc)
ns = int(ns.ljust(9, "0"))
ts = int(dt.timestamp()) * 10**9 + ns
for filename in sys.argv[2:]:
os.utime(filename, ns=(ts, ts))
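As a worked check of the arithmetic above: "2025-03-05 15:07:33" UTC is 1741187253 seconds since the epoch, and the fractional part "123456700" right-padded to nine digits is 123456700 ns, so ts = 1741187253 * 10^9 + 123456700 = 1741187253123456700 — the same value that appears as the local-version suffix (+1741187253123456700) in the patched-wheel test below.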

@@ -0,0 +1,10 @@
bundle:
name: bind-schema-test-$BUNDLE_NAME_SUFFIX
resources:
schemas:
schema1:
name: $SCHEMA_NAME
catalog_name: main
comment: This schema was created from DABs

@@ -0,0 +1,38 @@
=== Bind schema test:
=== Substitute variables in the template:
=== Create a pre-defined schema: {
"full_name": "main.test-schema-[UUID]",
"catalog_name": "main",
"comment": null
}
=== Bind schema: Updating deployment state...
Successfully bound databricks_schema with an id 'main.test-schema-[UUID]'. Run 'bundle deploy' to deploy changes to your workspace
=== Deploy bundle: Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/bind-schema-test-[UUID]/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
=== Read the pre-defined schema: {
"full_name": "main.test-schema-[UUID]",
"catalog_name": "main",
"comment": "This schema was created from DABs"
}
=== Unbind the schema: Updating deployment state...
=== Destroy the bundle: All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/bind-schema-test-[UUID]/default
Deleting files...
Destroy complete!
=== Read the pre-defined schema again (expecting it still exists): {
"full_name": "main.test-schema-[UUID]",
"catalog_name": "main",
"comment": "This schema was created from DABs"
}
=== Test cleanup:
=== Delete the pre-defined schema test-schema-[UUID]: 0

@@ -0,0 +1,36 @@
title "Bind schema test: "
title "Substitute variables in the template: "
export BUNDLE_NAME_SUFFIX=$(uuid)
export SCHEMA_NAME="test-schema-$(uuid)"
envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
title "Create a pre-defined schema: "
CATALOG_NAME=main
$CLI schemas create ${SCHEMA_NAME} ${CATALOG_NAME} | jq '{full_name, catalog_name, comment}'
cleanupRemoveSchema() {
title "Test cleanup: "
title "Delete the pre-defined schema ${SCHEMA_NAME}: "
$CLI schemas delete ${CATALOG_NAME}.${SCHEMA_NAME}
echo $?
}
trap cleanupRemoveSchema EXIT
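# NB: the EXIT trap runs cleanupRemoveSchema even when a later command fails, so the schema is always deleted.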
title "Bind schema: "
$CLI bundle deployment bind schema1 ${CATALOG_NAME}.${SCHEMA_NAME} --auto-approve
title "Deploy bundle: "
$CLI bundle deploy --force-lock --auto-approve
title "Read the pre-defined schema: "
$CLI schemas get ${CATALOG_NAME}.${SCHEMA_NAME} | jq '{full_name, catalog_name, comment}'
title "Unbind the schema: "
$CLI bundle deployment unbind schema1
title "Destroy the bundle: "
$CLI bundle destroy --auto-approve
title "Read the pre-defined schema again (expecting it still exists): "
$CLI schemas get ${CATALOG_NAME}.${SCHEMA_NAME} | jq '{full_name, catalog_name, comment}'

@@ -0,0 +1,3 @@
Local = false
Cloud = true
RequiresUnityCatalog = true

@@ -6,3 +6,4 @@ trace $CLI bundle validate -t prod
# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
rm .databricks/.gitignore

@@ -48,7 +48,7 @@
- catalog: main
+ ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
+ # catalog: catalog_name
target: my_default_python_${bundle.target}
schema: my_default_python_${bundle.target}
- serverless: true
libraries:
- notebook:

@@ -5,7 +5,7 @@ resources:
name: my_default_python_pipeline
## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
# catalog: catalog_name
target: my_default_python_${bundle.target}
schema: my_default_python_${bundle.target}
libraries:
- notebook:
path: ../src/dlt_pipeline.ipynb

@@ -6,6 +6,7 @@ trace $CLI bundle validate -t prod
# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
rm .databricks/.gitignore
cd ../../

@@ -18,5 +18,5 @@ See also the documentation at https://docs.databricks.com/dev-tools/bundles/inde
- ## Catalog is required for serverless compute
- catalog: main
+ catalog: customcatalog
target: my_default_python_${bundle.target}
schema: my_default_python_${bundle.target}
serverless: true

@@ -5,7 +5,7 @@ resources:
name: my_default_python_pipeline
## Catalog is required for serverless compute
catalog: main
target: my_default_python_${bundle.target}
schema: my_default_python_${bundle.target}
serverless: true
libraries:
- notebook:

@@ -6,3 +6,4 @@ trace $CLI bundle validate -t prod
# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
rm .databricks/.gitignore

@@ -6,3 +6,6 @@ trace $CLI bundle validate -t prod
# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
# Only for this test (default-sql), record .databricks/.gitignore in the output
mv .databricks/.gitignore .databricks/out.gitignore

@@ -11,3 +11,4 @@ rm -fr .venv resources/__pycache__ uv.lock my_jobs_as_code.egg-info
# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
rm .databricks/.gitignore

@@ -17,6 +17,13 @@ variables:
cluster1:
type: complex
description: "A cluster definition"
default:
spark_version: "default-version"
node_type_id: "default-node-type"
num_workers: 40
spark_conf:
spark.databricks.delta.retentionDurationCheck.enabled: true
spark.other.variable.shouldNotBeSeen: true
cluster2:
type: complex
description: "A cluster definition"

@@ -0,0 +1,32 @@
=== Test prebuilt wheel:
>>> setmtime.py 2025-03-05 15:07:33.123456700 my_test_code-0.0.1-py3-none-any.whl
>>> [CLI] selftest patchwhl my_test_code-0.0.1-py3-none-any.whl
Warn: Patched whl: my_test_code-0.0.1-py3-none-any.whl -> my_test_code-0.0.1+1741187253123456700-py3-none-any.whl
>>> diff.py original output
--- original/my_test_code-0.0.1+1741187253123456700.dist-info/METADATA
+++ output/my_test_code-0.0.1+1741187253123456700.dist-info/METADATA
@@ -1,5 +1,5 @@
Metadata-Version: 2.1
Name: my-test-code
-Version: 0.0.1
+Version: 0.0.1+1741187253123456700
Summary: my test wheel
Home-page: https://databricks.com
--- original/my_test_code-0.0.1+1741187253123456700.dist-info/RECORD
+++ output/my_test_code-0.0.1+1741187253123456700.dist-info/RECORD
@@ -1,7 +1,7 @@
src/__init__.py,sha256=BRmKeYehopKv4NG_SFa7t6wn248RrPHJivu7DM1R-Rw,48
src/__main__.py,sha256=8TtsnLsaJEM35Y4L8ocrv-qfxusgYpRL2HPyYiabHng,242
-my_test_code-0.0.1.dist-info/METADATA,sha256=6fyVq4hexXGUP_J2mB1CI-ijZ6CenvKNIlHx0bKPRJ0,197
-my_test_code-0.0.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-my_test_code-0.0.1.dist-info/entry_points.txt,sha256=oDWOW9SsBlk4Uejj1ftYPBxfhJ5ZJctb4JOUIG1rc-4,34
-my_test_code-0.0.1.dist-info/top_level.txt,sha256=74rtVfumQlgAPzR5_2CgYN24MB0XARCg0t-gzk6gTrM,4
-my_test_code-0.0.1.dist-info/RECORD,,
+my_test_code-0.0.1+1741187253123456700.dist-info/METADATA,sha256=H99P2vEwB_hBVPNtPwsXZotaDQzmWEGeSlOtMzWe62U,217
+my_test_code-0.0.1+1741187253123456700.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+my_test_code-0.0.1+1741187253123456700.dist-info/entry_points.txt,sha256=oDWOW9SsBlk4Uejj1ftYPBxfhJ5ZJctb4JOUIG1rc-4,34
+my_test_code-0.0.1+1741187253123456700.dist-info/top_level.txt,sha256=74rtVfumQlgAPzR5_2CgYN24MB0XARCg0t-gzk6gTrM,4
+my_test_code-0.0.1+1741187253123456700.dist-info/RECORD,,

@@ -0,0 +1,9 @@
Metadata-Version: 2.1
Name: my-test-code
Version: 0.0.1+1741187253123456700
Summary: my test wheel
Home-page: https://databricks.com
Author: Databricks
Author-email: john.doe@databricks.com
Requires-Dist: setuptools

@@ -0,0 +1,7 @@
src/__init__.py,sha256=BRmKeYehopKv4NG_SFa7t6wn248RrPHJivu7DM1R-Rw,48
src/__main__.py,sha256=8TtsnLsaJEM35Y4L8ocrv-qfxusgYpRL2HPyYiabHng,242
my_test_code-0.0.1+1741187253123456700.dist-info/METADATA,sha256=H99P2vEwB_hBVPNtPwsXZotaDQzmWEGeSlOtMzWe62U,217
my_test_code-0.0.1+1741187253123456700.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
my_test_code-0.0.1+1741187253123456700.dist-info/entry_points.txt,sha256=oDWOW9SsBlk4Uejj1ftYPBxfhJ5ZJctb4JOUIG1rc-4,34
my_test_code-0.0.1+1741187253123456700.dist-info/top_level.txt,sha256=74rtVfumQlgAPzR5_2CgYN24MB0XARCg0t-gzk6gTrM,4
my_test_code-0.0.1+1741187253123456700.dist-info/RECORD,,

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -0,0 +1,2 @@
[group_1]
run = src.__main__:main

@@ -0,0 +1,2 @@
__version__ = "0.0.1"
__author__ = "Databricks"

@@ -0,0 +1,16 @@
"""
The entry point of the Python Wheel
"""
import sys
def main():
# This method will print the provided arguments
print('Hello from my func')
print('Got arguments:')
print(sys.argv)
if __name__ == '__main__':
main()

@@ -0,0 +1 @@
exclude = ["output"]

@@ -0,0 +1,14 @@
title "Test prebuilt wheel:"
trace setmtime.py "2025-03-05 15:07:33.123456700" my_test_code-0.0.1-py3-none-any.whl
trace $CLI selftest patchwhl my_test_code-0.0.1-py3-none-any.whl
mkdir output original
unzip -q my_test_code-0.0.1+1*-py3-none-any.whl -d output
unzip -q my_test_code-0.0.1-py3-none-any.whl -d original
rm my_test_code-0.0.1+1*-py3-none-any.whl
# rename directory to match so that we can compare contents
mv original/my_test_code-0.0.1.dist-info original/my_test_code-0.0.1+1741187253123456700.dist-info
trace diff.py original output
rm -fr original

@@ -0,0 +1,5 @@
>>> [CLI] queries list
ID Name Author
[UUID] Example query 1 user@acme.com
[UUID] Example query 2 user@acme.com

@@ -0,0 +1 @@
trace $CLI queries list

@@ -0,0 +1,58 @@
[[Server]]
Pattern = "GET /api/2.0/sql/queries"
Response.Body = '''
{
"results": [
{
"description": "Example description",
"owner_user_name": "user@acme.com",
"tags": [
"Tag 1"
],
"display_name": "Example query 1",
"id": "ae25e731-92f2-4838-9fb2-1ca364320a3d",
"lifecycle_state": "ACTIVE",
"last_modifier_user_name": "user@acme.com",
"query_text": "SELECT 1",
"parameters": [
{
"name": "foo",
"text_value": {
"value": "bar"
},
"title": "foo"
}
],
"update_time": "2019-08-24T14:15:22Z",
"warehouse_id": "a7066a8ef796be84",
"run_as_mode": "OWNER",
"create_time": "2019-08-24T14:15:22Z"
},
{
"description": "Example description",
"owner_user_name": "user@acme.com",
"tags": [
"Tag 1"
],
"display_name": "Example query 2",
"id": "be25e731-92f2-4838-9fb2-1ca364320a3d",
"lifecycle_state": "ACTIVE",
"last_modifier_user_name": "user@acme.com",
"query_text": "SELECT 1",
"parameters": [
{
"name": "foo",
"text_value": {
"value": "bar"
},
"title": "foo"
}
],
"update_time": "2019-08-24T14:15:22Z",
"warehouse_id": "a7066a8ef796be84",
"run_as_mode": "OWNER",
"create_time": "2019-08-24T14:15:22Z"
}
]
}
'''
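This [[Server]] stanza is the acceptance-test stub for the queries API: the two canned results above are what `trace $CLI queries list` renders as the three-column table in the expected output earlier in this diff.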

@@ -31,6 +31,9 @@ type TestConfig struct {
// If true, run this test when running with cloud env configured
Cloud *bool
// If true and Cloud=true, run this test only if unity catalog is available in the cloud environment
RequiresUnityCatalog *bool
// List of additional replacements to apply on this test.
// Old is a regexp, New is a replacement expression.
Repls []testdiff.Replacement

@@ -59,3 +59,7 @@ withdir() {
cd "$orig_dir" || return $?
return $exit_code
}
uuid() {
python3 -c 'import uuid; print(uuid.uuid4())'
}

@@ -21,6 +21,7 @@ import (
"github.com/databricks/cli/libs/fileset"
"github.com/databricks/cli/libs/locker"
"github.com/databricks/cli/libs/log"
libsync "github.com/databricks/cli/libs/sync"
"github.com/databricks/cli/libs/tags"
"github.com/databricks/cli/libs/terraform"
"github.com/databricks/cli/libs/vfs"
@@ -198,6 +199,7 @@ func (b *Bundle) CacheDir(ctx context.Context, paths ...string) (string, error)
return "", err
}
libsync.WriteGitIgnore(ctx, b.BundleRootPath)
return dir, nil
}

@@ -36,13 +36,13 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
}{
{
name: "preset enabled, bundle in Workspace, databricks runtime",
ctx: dbr.MockRuntime(testContext, true),
ctx: dbr.MockRuntime(testContext, dbr.Environment{IsDbr: true, Version: "15.4"}),
initialValue: &enabled,
expectedValue: &enabled,
},
{
name: "preset enabled, bundle not in Workspace, databricks runtime",
ctx: dbr.MockRuntime(testContext, true),
ctx: dbr.MockRuntime(testContext, dbr.Environment{IsDbr: true, Version: "15.4"}),
mutateBundle: func(b *bundle.Bundle) {
b.SyncRootPath = "/Users/user.name@company.com"
},
@@ -52,26 +52,26 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
},
{
name: "preset enabled, bundle in Workspace, not databricks runtime",
ctx: dbr.MockRuntime(testContext, false),
ctx: dbr.MockRuntime(testContext, dbr.Environment{}),
initialValue: &enabled,
expectedValue: &disabled,
expectedWarning: "source-linked deployment is available only in the Databricks Workspace",
},
{
name: "preset disabled, bundle in Workspace, databricks runtime",
ctx: dbr.MockRuntime(testContext, true),
ctx: dbr.MockRuntime(testContext, dbr.Environment{IsDbr: true, Version: "15.4"}),
initialValue: &disabled,
expectedValue: &disabled,
},
{
name: "preset nil, bundle in Workspace, databricks runtime",
ctx: dbr.MockRuntime(testContext, true),
ctx: dbr.MockRuntime(testContext, dbr.Environment{IsDbr: true, Version: "15.4"}),
initialValue: nil,
expectedValue: nil,
},
{
name: "preset nil, dev mode true, bundle in Workspace, databricks runtime",
ctx: dbr.MockRuntime(testContext, true),
ctx: dbr.MockRuntime(testContext, dbr.Environment{IsDbr: true, Version: "15.4"}),
mutateBundle: func(b *bundle.Bundle) {
b.Config.Bundle.Mode = config.Development
},
@@ -80,7 +80,7 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
},
{
name: "preset enabled, workspace.file_path is defined by user",
ctx: dbr.MockRuntime(testContext, true),
ctx: dbr.MockRuntime(testContext, dbr.Environment{IsDbr: true, Version: "15.4"}),
mutateBundle: func(b *bundle.Bundle) {
b.Config.Workspace.FilePath = "file_path"
},
@ -90,7 +90,7 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
},
{
name: "preset enabled, apps is defined by user",
ctx: dbr.MockRuntime(testContext, true),
ctx: dbr.MockRuntime(testContext, dbr.Environment{IsDbr: true, Version: "15.4"}),
mutateBundle: func(b *bundle.Bundle) {
b.Config.Resources.Apps = map[string]*resources.App{
"app": {},

@@ -47,7 +47,7 @@ func TestConfigureWSFS_SkipsIfNotRunningOnRuntime(t *testing.T) {
originalSyncRoot := b.SyncRoot
ctx := context.Background()
ctx = dbr.MockRuntime(ctx, false)
ctx = dbr.MockRuntime(ctx, dbr.Environment{})
diags := bundle.Apply(ctx, b, mutator.ConfigureWSFS())
assert.Empty(t, diags)
assert.Equal(t, originalSyncRoot, b.SyncRoot)
@@ -58,7 +58,7 @@ func TestConfigureWSFS_SwapSyncRoot(t *testing.T) {
originalSyncRoot := b.SyncRoot
ctx := context.Background()
ctx = dbr.MockRuntime(ctx, true)
ctx = dbr.MockRuntime(ctx, dbr.Environment{IsDbr: true, Version: "15.4"})
diags := bundle.Apply(ctx, b, mutator.ConfigureWSFS())
assert.Empty(t, diags)
assert.NotEqual(t, originalSyncRoot, b.SyncRoot)
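The diffs above replace the boolean second argument of dbr.MockRuntime with a struct. A minimal sketch of what the new libs/dbr helper presumably exposes, inferred purely from these call sites (the field names IsDbr and Version come from the diff; the context plumbing is an assumption):

// Sketch only — inferred from the call sites in this diff, not the actual libs/dbr source.
package dbr

import "context"

// Environment describes the detected Databricks Runtime; it replaces the
// old boolean "is DBR" flag so tests can also pin a runtime version.
type Environment struct {
	IsDbr   bool   // true when running inside the Databricks Runtime
	Version string // runtime version string, e.g. "15.4"
}

type ctxKey int

const envKey ctxKey = 0

// MockRuntime returns a context carrying the given Environment, which the
// runtime-detection code is assumed to read back in tests.
func MockRuntime(ctx context.Context, env Environment) context.Context {
	return context.WithValue(ctx, envKey, env)
}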

@@ -52,7 +52,7 @@ type Root struct {
Targets map[string]*Target `json:"targets,omitempty"`
// DEPRECATED. Left for backward compatibility with Targets
Environments map[string]*Target `json:"environments,omitempty" bundle:"deprecated"`
Environments map[string]*Target `json:"environments,omitempty"`
// Sync section specifies options for files synchronization
Sync Sync `json:"sync,omitempty"`

@@ -7,6 +7,7 @@ import (
"path"
"reflect"
"strings"
"time"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/internal/annotation"
@@ -43,7 +44,7 @@ func main() {
[]string{path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, rootFileName),
reflect.TypeOf(config.Root{}),
string(rootHeader),
fillTemplateVariables(string(rootHeader)),
)
if err != nil {
log.Fatal(err)
@@ -56,7 +57,7 @@ func main() {
[]string{path.Join(annotationDir, "annotations_openapi.yml"), path.Join(annotationDir, "annotations_openapi_overrides.yml"), path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, resourcesFileName),
reflect.TypeOf(config.Resources{}),
string(resourcesHeader),
fillTemplateVariables(string(resourcesHeader)),
)
if err != nil {
log.Fatal(err)
@@ -133,3 +134,8 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
s.Examples = []string{a.MarkdownExamples}
}
}
func fillTemplateVariables(s string) string {
currentDate := time.Now().Format("2006-01-02")
return strings.ReplaceAll(s, "{{update_date}}", currentDate)
}
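One detail worth noting: time.Now().Format("2006-01-02") uses Go's reference-date layout, so it renders the current date in ISO form; that value replaces the {{update_date}} placeholder introduced in the last_update front-matter diffs below.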

@@ -12,7 +12,7 @@ func buildMarkdown(nodes []rootNode, outputFile, header string) error {
m = m.PlainText(header)
for _, node := range nodes {
m = m.LF()
title := escapeBrackets(node.Title)
title := node.Title
if node.TopLevel {
m = m.H2(title)
} else {
@@ -68,21 +68,24 @@ func pickLastWord(s string) string {
// Build a custom table which we use in Databricks website
func buildAttributeTable(m *markdownRenderer, attributes []attributeNode) *markdownRenderer {
m = m.LF()
m = m.PlainText(".. list-table::")
m = m.PlainText(" :header-rows: 1")
m = m.PlainText(":::list-table")
m = m.LF()
m = m.PlainText(" * - Key")
m = m.PlainText(" - Type")
m = m.PlainText(" - Description")
m = m.PlainText("- - Key")
m = m.PlainText(" - Type")
m = m.PlainText(" - Description")
m = m.LF()
for _, a := range attributes {
m = m.PlainText(" * - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a))
m = m.PlainText("- - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a))
m = m.LF()
}
m = m.PlainText(":::")
m = m.LF()
return m
}
@@ -94,7 +97,7 @@ func formatDescription(a attributeNode) string {
} else if s != "" {
s += ". "
}
s += fmt.Sprintf("See [_](#%s).", cleanAnchor(a.Link))
s += fmt.Sprintf("See [\\_](#%s).", cleanAnchor(a.Link))
}
return s
}
@@ -102,15 +105,7 @@ func formatDescription(a attributeNode) string {
// Docs framework does not allow special characters in anchor links and strip them out by default
// We need to clean them up to make sure the links pass the validation
func cleanAnchor(s string) string {
s = strings.ReplaceAll(s, "<", "")
s = strings.ReplaceAll(s, ">", "")
s = strings.ReplaceAll(s, ".", "")
return s
}
func escapeBrackets(s string) string {
s = strings.ReplaceAll(s, "<", "\\<")
s = strings.ReplaceAll(s, ">", "\\>")
s = strings.ReplaceAll(s, nameFieldWithFormat, nameField)
return s
}

@@ -20,12 +20,12 @@ func TestBuildMarkdownAnchors(t *testing.T) {
Title: "my_attribute",
Type: "Map",
Description: "Desc with link",
Link: "some_field.<name>.my_attribute",
Link: "some_field._name_.my_attribute",
},
},
},
{
Title: "some_field.<name>.my_attribute",
Title: "some_field._name_.my_attribute",
TopLevel: false,
Type: "Boolean",
Description: "Another description",

@@ -137,8 +137,13 @@ func getMapValueType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema) *
return nil
}
const (
nameField = "name"
nameFieldWithFormat = "_name_"
)
func getMapKeyPrefix(s string) string {
return s + ".<name>"
return s + "." + nameFieldWithFormat
}
func removePluralForm(s string) string {

@@ -93,11 +93,11 @@ func TestBuildNodes_ChildExpansion(t *testing.T) {
TopLevel: true,
Type: "Map",
ObjectKeyAttributes: []attributeNode{
{Title: "mapSub", Type: "Map", Link: "myMap.<name>.mapSub"},
{Title: "mapSub", Type: "Map", Link: "myMap._name_.mapSub"},
},
},
{
Title: "myMap.<name>.mapSub",
Title: "myMap._name_.mapSub",
Type: "Map",
Attributes: []attributeNode{
{Title: "deepSub", Type: "Boolean"},

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -1,7 +1,7 @@
---
description: 'Configuration reference for databricks.yml'
last_update:
date: 2025-02-14
date: {{update_date}}
---
<!--DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli-->

@@ -1,7 +1,7 @@
---
description: 'Learn about resources supported by Databricks Asset Bundles and how to configure them.'
last_update:
date: 2025-02-14
date: {{update_date}}
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->

@@ -8,19 +8,20 @@ This is a description
.. list-table::
:header-rows: 1
:::list-table
* - Key
- Type
- Description
- - Key
- Type
- Description
* - `my_attribute`
- Map
- Desc with link. See [_](#some_fieldnamemy_attribute).
- - `my_attribute`
- Map
- Desc with link. See [\_](#some_fieldnamemy_attribute).
:::
### some_field.\<name\>.my_attribute
### some_field._name_.my_attribute
**`Type: Boolean`**

@@ -7,6 +7,7 @@ type Descriptor struct {
Default any `json:"default,omitempty"`
Enum []any `json:"enum,omitempty"`
MarkdownExamples string `json:"markdown_examples,omitempty"`
DeprecationMessage string `json:"deprecation_message,omitempty"`
}
const Placeholder = "PLACEHOLDER"

@@ -127,6 +127,12 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
if a.Default != nil {
s.Default = a.Default
}
if a.DeprecationMessage != "" {
s.Deprecated = true
s.DeprecationMessage = a.DeprecationMessage
}
s.MarkdownDescription = convertLinksToAbsoluteUrl(a.MarkdownDescription)
s.Title = a.Title
s.Enum = a.Enum

@@ -61,6 +61,8 @@ github.com/databricks/cli/bundle/config.Experimental:
"pydabs":
"description": |-
The PyDABs configuration.
"deprecation_message": |-
Deprecated: please use python instead
"python":
"description": |-
Configures loading of Python code defined with 'databricks-bundles' package.
@@ -186,9 +188,9 @@ github.com/databricks/cli/bundle/config.Resources:
The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [_](/dev-tools/bundles/resources.md#quality_monitors).
"registered_models":
"description": |-
The registered model definitions for the bundle, where each key is the name of the <UC> registered model.
The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.
"markdown_description": |-
The registered model definitions for the bundle, where each key is the name of the <UC> registered model. See [_](/dev-tools/bundles/resources.md#registered_models).
The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [_](/dev-tools/bundles/resources.md#registered_models)
"schemas":
"description": |-
The schema definitions for the bundle, where each key is the name of the schema.
@@ -220,6 +222,11 @@ github.com/databricks/cli/bundle/config.Root:
The bundle attributes when deploying to this target.
"markdown_description": |-
The bundle attributes when deploying to this target,
"environments":
"description": |-
PLACEHOLDER
"deprecation_message": |-
Deprecated: please use targets instead
"experimental":
"description": |-
Defines attributes for experimental features.
@@ -254,7 +261,7 @@ github.com/databricks/cli/bundle/config.Root:
"description": |-
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.
"markdown_description": |-
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about <DABS> supported resources, and resource definition reference, see [_](/dev-tools/bundles/resources.md).
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [_](/dev-tools/bundles/resources.md).
```yaml
resources:
@@ -264,9 +271,9 @@ github.com/databricks/cli/bundle/config.Root:
```
"run_as":
"description": |-
The identity to use when running <DABS> workflows.
The identity to use when running Databricks Asset Bundles workflows.
"markdown_description": |-
The identity to use when running <DABS> workflows. See [_](/dev-tools/bundles/run-as.md).
The identity to use when running Databricks Asset Bundles workflows. See [_](/dev-tools/bundles/run-as.md).
"sync":
"description": |-
The files and file paths to include or exclude in the bundle.
@@ -308,6 +315,8 @@ github.com/databricks/cli/bundle/config.Target:
"compute_id":
"description": |-
Deprecated. The ID of the compute to use for this target.
"deprecation_message": |-
Deprecated: please use cluster_id instead
"default":
"description": |-
Whether this target is the default target.

@@ -140,7 +140,7 @@ github.com/databricks/cli/bundle/config/resources.Dashboard:
github.com/databricks/cli/bundle/config/resources.Job:
"_":
"markdown_description": |-
The job resource allows you to define [jobs and their corresponding tasks](/api/workspace/jobs/create) in your bundle. For information about jobs, see [_](/jobs/index.md). For a tutorial that uses a <DABS> template to create a job, see [_](/dev-tools/bundles/jobs-tutorial.md).
The job resource allows you to define [jobs and their corresponding tasks](/api/workspace/jobs/create) in your bundle. For information about jobs, see [_](/jobs/index.md). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [_](/dev-tools/bundles/jobs-tutorial.md).
"markdown_examples": |-
The following example defines a job with the resource key `hello-job` with one notebook task:
@@ -188,7 +188,7 @@ github.com/databricks/cli/bundle/config/resources.MlflowExperiment:
github.com/databricks/cli/bundle/config/resources.MlflowModel:
"_":
"markdown_description": |-
The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use <UC> [registered models](#registered-model) instead.
The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](#registered-model) instead.
"permissions":
"description": |-
PLACEHOLDER
@@ -197,7 +197,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
"markdown_description": |-
The model_serving_endpoint resource allows you to define [model serving endpoints](/api/workspace/servingendpoints/create). See [_](/machine-learning/model-serving/manage-serving-endpoints.md).
"markdown_examples": |-
The following example defines a <UC> model serving endpoint:
The following example defines a Unity Catalog model serving endpoint:
```yaml
resources:
@@ -224,7 +224,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
github.com/databricks/cli/bundle/config/resources.Pipeline:
"_":
"markdown_description": |-
The pipeline resource allows you to create <DLT> [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/delta-live-tables/index.md). For a tutorial that uses the <DABS> template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
The pipeline resource allows you to create Delta Live Tables [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/dlt/index.md). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
"markdown_examples": |-
The following example defines a pipeline with the resource key `hello-pipeline`:
@@ -257,7 +257,7 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
github.com/databricks/cli/bundle/config/resources.QualityMonitor:
"_":
"markdown_description": |-
The quality_monitor resource allows you to define a <UC> [table monitor](/api/workspace/qualitymonitors/create). For information about monitors, see [_](/machine-learning/model-serving/monitor-diagnose-endpoints.md).
The quality_monitor resource allows you to define a Unity Catalog [table monitor](/api/workspace/qualitymonitors/create). For information about monitors, see [_](/machine-learning/model-serving/monitor-diagnose-endpoints.md).
"markdown_examples": |-
The following example defines a quality monitor:
@@ -285,9 +285,9 @@ github.com/databricks/cli/bundle/config/resources.QualityMonitor:
github.com/databricks/cli/bundle/config/resources.RegisteredModel:
"_":
"markdown_description": |-
The registered model resource allows you to define models in <UC>. For information about <UC> [registered models](/api/workspace/registeredmodels/create), see [_](/machine-learning/manage-model-lifecycle/index.md).
The registered model resource allows you to define models in Unity Catalog. For information about Unity Catalog [registered models](/api/workspace/registeredmodels/create), see [_](/machine-learning/manage-model-lifecycle/index.md).
"markdown_examples": |-
The following example defines a registered model in <UC>:
The following example defines a registered model in Unity Catalog:
```yaml
resources:
@@ -308,12 +308,12 @@ github.com/databricks/cli/bundle/config/resources.RegisteredModel:
github.com/databricks/cli/bundle/config/resources.Schema:
"_":
"markdown_description": |-
The schema resource type allows you to define <UC> [schemas](/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:
The schema resource type allows you to define Unity Catalog [schemas](/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:
- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.
- Only fields supported by the corresponding [Schemas object create API](/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](/api/workspace/schemas/update).
"markdown_examples": |-
The following example defines a pipeline with the resource key `my_pipeline` that creates a <UC> schema with the key `my_schema` as the target:
The following example defines a pipeline with the resource key `my_pipeline` that creates a Unity Catalog schema with the key `my_schema` as the target:
```yaml
resources:
@@ -334,9 +334,9 @@ github.com/databricks/cli/bundle/config/resources.Schema:
comment: This schema was created by DABs.
```
A top-level grants mapping is not supported by <DABS>, so if you want to set grants for a schema, define the grants for the schema within the `schemas` mapping. For more information about grants, see [_](/data-governance/unity-catalog/manage-privileges/index.md#grant).
A top-level grants mapping is not supported by Databricks Asset Bundles, so if you want to set grants for a schema, define the grants for the schema within the `schemas` mapping. For more information about grants, see [_](/data-governance/unity-catalog/manage-privileges/index.md#grant).
The following example defines a <UC> schema with grants:
The following example defines a Unity Catalog schema with grants:
```yaml
resources:
@@ -361,13 +361,13 @@ github.com/databricks/cli/bundle/config/resources.Schema:
github.com/databricks/cli/bundle/config/resources.Volume:
"_":
"markdown_description": |-
The volume resource type allows you to define and create <UC> [volumes](/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:
The volume resource type allows you to define and create Unity Catalog [volumes](/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:
- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use <DABS> to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.
- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use Databricks Asset Bundles to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.
- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [_](/dev-tools/bundles/deployment-modes.md#custom-presets).
"markdown_examples": |-
The following example creates a <UC> volume with the key `my_volume`:
The following example creates a Unity Catalog volume with the key `my_volume`:
```yaml
resources:
@@ -378,7 +378,7 @@ github.com/databricks/cli/bundle/config/resources.Volume:
schema_name: my_schema
```
For an example bundle that runs a job that writes to a file in <UC> volume, see the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples/tree/main/knowledge_base/write_from_job_to_volume).
For an example bundle that runs a job that writes to a file in Unity Catalog volume, see the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples/tree/main/knowledge_base/write_from_job_to_volume).
"grants":
"description": |-
PLACEHOLDER
@@ -579,3 +579,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
"model_version":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo:
"abfss":
"description": |-
Contains the Azure Data Lake Storage destination path
github.com/databricks/databricks-sdk-go/service/compute.Environment:
"dependencies":
"description": |-
List of pip dependencies, as supported by the version of pip in this environment.
github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog:
"granularities":
"description": |-
Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.
github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries:
"granularities":
"description": |-
Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
"log_analytics_primary_key":
"description": |-
The primary key for the Azure Log Analytics agent configuration
"log_analytics_workspace_id":
"description": |-
The workspace ID for the Azure Log Analytics agent configuration

@@ -459,7 +459,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a \u003cDABS\u003e template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
},
{
"type": "string",
@@ -552,7 +552,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use \u003cUC\u003e [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
},
{
"type": "string",
@@ -743,7 +743,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The pipeline resource allows you to create \u003cDLT\u003e [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/delta-live-tables/index.html). For a tutorial that uses the \u003cDABS\u003e template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
"markdownDescription": "The pipeline resource allows you to create Delta Live Tables [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/dlt/index.html). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
},
{
"type": "string",
@@ -818,7 +818,7 @@
"assets_dir",
"output_schema_name"
],
"markdownDescription": "The quality_monitor resource allows you to define a \u003cUC\u003e [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
"markdownDescription": "The quality_monitor resource allows you to define a Unity Catalog [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
},
{
"type": "string",
@@ -861,7 +861,7 @@
"name",
"schema_name"
],
"markdownDescription": "The registered model resource allows you to define models in \u003cUC\u003e. For information about \u003cUC\u003e [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
"markdownDescription": "The registered model resource allows you to define models in Unity Catalog. For information about Unity Catalog [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
},
{
"type": "string",
@@ -902,7 +902,7 @@
"catalog_name",
"name"
],
"markdownDescription": "The schema resource type allows you to define \u003cUC\u003e [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
"markdownDescription": "The schema resource type allows you to define Unity Catalog [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
},
{
"type": "string",
@@ -948,7 +948,7 @@
"name",
"schema_name"
],
"markdownDescription": "The volume resource type allows you to define and create \u003cUC\u003e [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use \u003cDABS\u003e to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
"markdownDescription": "The volume resource type allows you to define and create Unity Catalog [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use Databricks Asset Bundles to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
},
{
"type": "string",
@@ -1217,7 +1217,9 @@
"properties": {
"pydabs": {
"description": "The PyDABs configuration.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs"
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs",
"deprecationMessage": "Deprecated: please use python instead",
"deprecated": true
},
"python": {
"description": "Configures loading of Python code defined with 'databricks-bundles' package.",
@@ -1438,9 +1440,9 @@
"markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors)."
},
"registered_models": {
"description": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model.",
"description": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel",
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)."
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)"
},
"schemas": {
"description": "The schema definitions for the bundle, where each key is the name of the schema.",
@@ -1506,7 +1508,9 @@
},
"compute_id": {
"description": "Deprecated. The ID of the compute to use for this target.",
"$ref": "#/$defs/string"
"$ref": "#/$defs/string",
"deprecationMessage": "Deprecated: please use cluster_id instead",
"deprecated": true
},
"default": {
"description": "Whether this target is the default target.",
@@ -2122,7 +2126,7 @@
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.",
"$ref": "#/$defs/slice/string"
},
"label_col": {
@@ -2279,7 +2283,7 @@
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.",
"$ref": "#/$defs/slice/string"
},
"timestamp_col": {
@@ -2764,7 +2768,7 @@
"$ref": "#/$defs/string"
},
"dependencies": {
"description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e\nE.g. dependencies: [\"foo==0.0.1\", \"-r /Workspace/test/requirements.txt\"]",
"description": "List of pip dependencies, as supported by the version of pip in this environment.",
"$ref": "#/$defs/slice/string"
}
},
@@ -2860,7 +2864,7 @@
"type": "object",
"properties": {
"abfss": {
"description": "destination needs to be provided. e.g.\n`{ \"abfss\" : { \"destination\" : \"abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e\" } }",
"description": "Contains the Azure Data Lake Storage destination path",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info"
},
"dbfs": {
@@ -2968,11 +2972,11 @@
"type": "object",
"properties": {
"log_analytics_primary_key": {
"description": "\u003cneeds content added\u003e",
"description": "The primary key for the Azure Log Analytics agent configuration",
"$ref": "#/$defs/string"
},
"log_analytics_workspace_id": {
"description": "\u003cneeds content added\u003e",
"description": "The workspace ID for the Azure Log Analytics agent configuration",
"$ref": "#/$defs/string"
}
},
@@ -7409,6 +7413,11 @@
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle",
"markdownDescription": "The bundle attributes when deploying to this target,"
},
"environments": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target",
"deprecationMessage": "Deprecated: please use targets instead",
"deprecated": true
},
"experimental": {
"description": "Defines attributes for experimental features.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental"
@@ -7431,12 +7440,12 @@
"resources": {
"description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about \u003cDABS\u003e supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
},
"run_as": {
"description": "The identity to use when running \u003cDABS\u003e workflows.",
"description": "The identity to use when running Databricks Asset Bundles workflows.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
"markdownDescription": "The identity to use when running \u003cDABS\u003e workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
"markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
},
"sync": {
"description": "The files and file paths to include or exclude in the bundle.",


@ -69,7 +69,7 @@ func initializeTarget(t *testing.T, path, env string) (*bundle.Bundle, diag.Diag
b := load(t, path)
configureMock(t, b)
ctx := dbr.MockRuntime(context.Background(), false)
ctx := dbr.MockRuntime(context.Background(), dbr.Environment{})
diags := bundle.Apply(ctx, b, mutator.SelectTarget(env))
diags = diags.Extend(phases.Initialize(ctx, b))


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/config"
"github.com/spf13/cobra"
@ -109,7 +110,7 @@ func getAuthStatus(cmd *cobra.Command, args []string, showSensitive bool, fn try
return &status, nil
}
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
me, err := w.CurrentUser.Me(ctx)
if err != nil {
return &authStatus{


@ -6,6 +6,7 @@ import (
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/config"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/databricks/databricks-sdk-go/service/iam"
@ -17,7 +18,7 @@ import (
func TestGetWorkspaceAuthStatus(t *testing.T) {
ctx := context.Background()
m := mocks.NewMockWorkspaceClient(t)
ctx = root.SetWorkspaceClient(ctx, m.WorkspaceClient)
ctx = command.SetWorkspaceClient(ctx, m.WorkspaceClient)
cmd := &cobra.Command{}
cmd.SetContext(ctx)
@ -75,7 +76,7 @@ func TestGetWorkspaceAuthStatus(t *testing.T) {
func TestGetWorkspaceAuthStatusError(t *testing.T) {
ctx := context.Background()
m := mocks.NewMockWorkspaceClient(t)
ctx = root.SetWorkspaceClient(ctx, m.WorkspaceClient)
ctx = command.SetWorkspaceClient(ctx, m.WorkspaceClient)
cmd := &cobra.Command{}
cmd.SetContext(ctx)
@ -124,7 +125,7 @@ func TestGetWorkspaceAuthStatusError(t *testing.T) {
func TestGetWorkspaceAuthStatusSensitive(t *testing.T) {
ctx := context.Background()
m := mocks.NewMockWorkspaceClient(t)
ctx = root.SetWorkspaceClient(ctx, m.WorkspaceClient)
ctx = command.SetWorkspaceClient(ctx, m.WorkspaceClient)
cmd := &cobra.Command{}
cmd.SetContext(ctx)


@ -8,6 +8,7 @@ import (
"time"
"github.com/databricks/cli/cmd"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/auth"
"github.com/databricks/cli/libs/auth/cache"
"github.com/databricks/cli/libs/databrickscfg/profile"
@ -106,7 +107,7 @@ func getCobraCmdForTest(f fixtures.HTTPFixture) (*cobra.Command, *bytes.Buffer)
func TestTokenCmdWithProfilePrintsHelpfulLoginMessageOnRefreshFailure(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshFailureTokenResponse)
cmd.SetArgs([]string{"auth", "token", "--profile", "expired"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
assert.Empty(t, out)
@ -117,7 +118,7 @@ func TestTokenCmdWithProfilePrintsHelpfulLoginMessageOnRefreshFailure(t *testing
func TestTokenCmdWithHostPrintsHelpfulLoginMessageOnRefreshFailure(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshFailureTokenResponse)
cmd.SetArgs([]string{"auth", "token", "--host", "https://accounts.cloud.databricks.com", "--account-id", "expired"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
assert.Empty(t, out)
@ -128,7 +129,7 @@ func TestTokenCmdWithHostPrintsHelpfulLoginMessageOnRefreshFailure(t *testing.T)
func TestTokenCmdInvalidResponse(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshFailureInvalidResponse)
cmd.SetArgs([]string{"auth", "token", "--profile", "active"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
assert.Empty(t, out)
@ -139,7 +140,7 @@ func TestTokenCmdInvalidResponse(t *testing.T) {
func TestTokenCmdOtherErrorResponse(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshFailureOtherError)
cmd.SetArgs([]string{"auth", "token", "--profile", "active"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
assert.Empty(t, out)
@ -150,7 +151,7 @@ func TestTokenCmdOtherErrorResponse(t *testing.T) {
func TestTokenCmdWithProfileSuccess(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshSuccessTokenResponse)
cmd.SetArgs([]string{"auth", "token", "--profile", "active"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
validateToken(t, out)
@ -160,7 +161,7 @@ func TestTokenCmdWithProfileSuccess(t *testing.T) {
func TestTokenCmdWithHostSuccess(t *testing.T) {
cmd, output := getCobraCmdForTest(refreshSuccessTokenResponse)
cmd.SetArgs([]string{"auth", "token", "--host", "https://accounts.cloud.databricks.com", "--account-id", "expired"})
err := cmd.Execute()
err := root.Execute(cmd.Context(), cmd)
out := output.String()
validateToken(t, out)


@ -8,6 +8,7 @@ import (
"testing"
"github.com/databricks/cli/cmd"
"github.com/databricks/cli/cmd/root"
"github.com/stretchr/testify/assert"
"gopkg.in/ini.v1"
)
@ -57,7 +58,7 @@ func TestDefaultConfigureNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
cfgPath := filepath.Join(tempHomeDir, ".databrickscfg")
@ -91,7 +92,7 @@ func TestConfigFileFromEnvNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
_, err = os.Stat(cfgPath)
@ -131,7 +132,7 @@ func TestEnvVarsConfigureNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure", "--token"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
_, err = os.Stat(cfgPath)
@ -164,7 +165,7 @@ func TestEnvVarsConfigureNoArgsNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
_, err = os.Stat(cfgPath)
@ -193,7 +194,7 @@ func TestCustomProfileConfigureNoInteractive(t *testing.T) {
cmd := cmd.New(ctx)
cmd.SetArgs([]string{"configure", "--token", "--host", "https://host", "--profile", "CUSTOM"})
err := cmd.ExecuteContext(ctx)
err := root.Execute(ctx, cmd)
assert.NoError(t, err)
_, err = os.Stat(cfgPath)


@ -7,6 +7,7 @@ import (
"strings"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/filer/completer"
"github.com/spf13/cobra"
@ -35,7 +36,7 @@ func filerForPath(ctx context.Context, fullPath string) (filer.Filer, string, er
}
path := parts[1]
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
// If the specified path has the "Volumes" prefix, use the Files API.
if strings.HasPrefix(path, "/Volumes/") {


@ -6,7 +6,7 @@ import (
"strings"
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/fakefs"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
@ -73,7 +73,7 @@ func mockMustWorkspaceClientFunc(cmd *cobra.Command, args []string) error {
func setupCommand(t *testing.T) (*cobra.Command, *mocks.MockWorkspaceClient) {
m := mocks.NewMockWorkspaceClient(t)
ctx := context.Background()
ctx = root.SetWorkspaceClient(ctx, m.WorkspaceClient)
ctx = command.SetWorkspaceClient(ctx, m.WorkspaceClient)
cmd := &cobra.Command{}
cmd.SetContext(ctx)


@ -13,25 +13,31 @@ const cacheTTL = 1 * time.Hour
// NewReleaseCache creates a release cache for a repository in the GitHub org.
// Caller has to provide different cache directories for different repositories.
func NewReleaseCache(org, repo, cacheDir string) *ReleaseCache {
func NewReleaseCache(org, repo, cacheDir string, offlineInstall bool) *ReleaseCache {
pattern := fmt.Sprintf("%s-%s-releases", org, repo)
return &ReleaseCache{
cache: localcache.NewLocalCache[Versions](cacheDir, pattern, cacheTTL),
Org: org,
Repo: repo,
cache: localcache.NewLocalCache[Versions](cacheDir, pattern, cacheTTL),
Org: org,
Repo: repo,
Offline: offlineInstall,
}
}
type ReleaseCache struct {
cache localcache.LocalCache[Versions]
Org string
Repo string
cache localcache.LocalCache[Versions]
Org string
Repo string
Offline bool
}
func (r *ReleaseCache) Load(ctx context.Context) (Versions, error) {
return r.cache.Load(ctx, func() (Versions, error) {
return getVersions(ctx, r.Org, r.Repo)
})
if !r.Offline {
return r.cache.Load(ctx, func() (Versions, error) {
return getVersions(ctx, r.Org, r.Repo)
})
}
cached, err := r.cache.LoadCache()
return cached.Data, err
}
// getVersions is considered to be a private API, as we want usage to go through the cache
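A minimal usage sketch of the new offline behavior (assuming the import path cmd/labs/github; "blueprint" and the error wrapping are illustrative):

	package example

	import (
		"context"
		"fmt"

		"github.com/databricks/cli/cmd/labs/github"
	)

	func loadVersions(ctx context.Context, cacheDir string, offline bool) (github.Versions, error) {
		// Online: refresh from the GitHub API when the on-disk cache is missing
		// or older than cacheTTL. Offline: never touch the network and return
		// whatever LoadCache finds on disk, even if it is stale.
		r := github.NewReleaseCache("databrickslabs", "blueprint", cacheDir, offline)
		versions, err := r.Load(ctx)
		if err != nil {
			// In offline mode this fails when the project was never installed
			// with network access, i.e. no cache file exists yet.
			return nil, fmt.Errorf("versions: %w", err)
		}
		return versions, nil
	}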


@ -26,7 +26,7 @@ func TestLoadsReleasesForCLI(t *testing.T) {
ctx := context.Background()
ctx = WithApiOverride(ctx, server.URL)
r := NewReleaseCache("databricks", "cli", t.TempDir())
r := NewReleaseCache("databricks", "cli", t.TempDir(), false)
all, err := r.Load(ctx)
assert.NoError(t, err)
assert.Len(t, all, 2)


@ -7,16 +7,20 @@ import (
)
func newInstallCommand() *cobra.Command {
return &cobra.Command{
Use: "install NAME",
Args: root.ExactArgs(1),
Short: "Installs project",
RunE: func(cmd *cobra.Command, args []string) error {
inst, err := project.NewInstaller(cmd, args[0])
if err != nil {
return err
}
return inst.Install(cmd.Context())
},
cmd := &cobra.Command{}
var offlineInstall bool
cmd.Flags().BoolVar(&offlineInstall, "offline", offlineInstall, `Install in offline mode, using a previously downloaded copy of the project instead of fetching it from GitHub.`)
cmd.Use = "install NAME"
cmd.Args = root.ExactArgs(1)
cmd.Short = "Installs project"
cmd.RunE = func(cmd *cobra.Command, args []string) error {
inst, err := project.NewInstaller(cmd, args[0], offlineInstall)
if err != nil {
return err
}
return inst.Install(cmd.Context())
}
return cmd
}


@ -35,7 +35,7 @@ type LocalCache[T any] struct {
}
func (r *LocalCache[T]) Load(ctx context.Context, refresh func() (T, error)) (T, error) {
cached, err := r.loadCache()
cached, err := r.LoadCache()
if errors.Is(err, fs.ErrNotExist) {
return r.refreshCache(ctx, refresh, r.zero)
} else if err != nil {
@ -96,7 +96,7 @@ func (r *LocalCache[T]) FileName() string {
return filepath.Join(r.dir, r.name+".json")
}
func (r *LocalCache[T]) loadCache() (*cached[T], error) {
func (r *LocalCache[T]) LoadCache() (*cached[T], error) {
jsonFile := r.FileName()
raw, err := os.ReadFile(r.FileName())
if err != nil {


@ -54,7 +54,7 @@ func (d *devInstallation) Install(ctx context.Context) error {
return d.Installer.runHook(d.Command)
}
func NewInstaller(cmd *cobra.Command, name string) (installable, error) {
func NewInstaller(cmd *cobra.Command, name string, offlineInstall bool) (installable, error) {
if name == "." {
wd, err := os.Getwd()
if err != nil {
@ -75,28 +75,32 @@ func NewInstaller(cmd *cobra.Command, name string) (installable, error) {
version = "latest"
}
f := &fetcher{name}
version, err := f.checkReleasedVersions(cmd, version)
version, err := f.checkReleasedVersions(cmd, version, offlineInstall)
if err != nil {
return nil, fmt.Errorf("version: %w", err)
}
prj, err := f.loadRemoteProjectDefinition(cmd, version)
prj, err := f.loadRemoteProjectDefinition(cmd, version, offlineInstall)
if err != nil {
return nil, fmt.Errorf("remote: %w", err)
}
return &installer{
Project: prj,
version: version,
cmd: cmd,
Project: prj,
version: version,
cmd: cmd,
offlineInstall: offlineInstall,
}, nil
}
func NewUpgrader(cmd *cobra.Command, name string) (*installer, error) {
f := &fetcher{name}
version, err := f.checkReleasedVersions(cmd, "latest")
version, err := f.checkReleasedVersions(cmd, "latest", false)
if err != nil {
return nil, fmt.Errorf("version: %w", err)
}
prj, err := f.loadRemoteProjectDefinition(cmd, version)
prj, err := f.loadRemoteProjectDefinition(cmd, version, false)
if err != nil {
return nil, fmt.Errorf("remote: %w", err)
}
@ -115,7 +119,7 @@ type fetcher struct {
name string
}
func (f *fetcher) checkReleasedVersions(cmd *cobra.Command, version string) (string, error) {
func (f *fetcher) checkReleasedVersions(cmd *cobra.Command, version string, offlineInstall bool) (string, error) {
ctx := cmd.Context()
cacheDir, err := PathInLabs(ctx, f.name, "cache")
if err != nil {
@ -123,7 +127,8 @@ func (f *fetcher) checkReleasedVersions(cmd *cobra.Command, version string) (str
}
// `databricks labs install X` doesn't know which exact version to fetch, so first
// we fetch all versions and then pick the latest one dynamically.
versions, err := github.NewReleaseCache("databrickslabs", f.name, cacheDir).Load(ctx)
var versions github.Versions
versions, err = github.NewReleaseCache("databrickslabs", f.name, cacheDir, offlineInstall).Load(ctx)
if err != nil {
return "", fmt.Errorf("versions: %w", err)
}
@ -140,11 +145,23 @@ func (f *fetcher) checkReleasedVersions(cmd *cobra.Command, version string) (str
return version, nil
}
func (i *fetcher) loadRemoteProjectDefinition(cmd *cobra.Command, version string) (*Project, error) {
func (i *fetcher) loadRemoteProjectDefinition(cmd *cobra.Command, version string, offlineInstall bool) (*Project, error) {
ctx := cmd.Context()
raw, err := github.ReadFileFromRef(ctx, "databrickslabs", i.name, version, "labs.yml")
if err != nil {
return nil, fmt.Errorf("read labs.yml from GitHub: %w", err)
var raw []byte
var err error
if !offlineInstall {
raw, err = github.ReadFileFromRef(ctx, "databrickslabs", i.name, version, "labs.yml")
if err != nil {
return nil, fmt.Errorf("read labs.yml from GitHub: %w", err)
}
} else {
libDir, _ := PathInLabs(ctx, i.name, "lib")
fileName := filepath.Join(libDir, "labs.yml")
raw, err = os.ReadFile(fileName)
if err != nil {
return nil, fmt.Errorf("read labs.yml from local path %s: %w", libDir, err)
}
}
return readFromBytes(ctx, raw)
}


@ -76,7 +76,8 @@ type installer struct {
// command instance is used for:
// - auth profile flag override
// - standard input, output, and error streams
cmd *cobra.Command
cmd *cobra.Command
offlineInstall bool
}
func (i *installer) Install(ctx context.Context) error {
@ -101,9 +102,15 @@ func (i *installer) Install(ctx context.Context) error {
} else if err != nil {
return fmt.Errorf("login: %w", err)
}
err = i.downloadLibrary(ctx)
if err != nil {
return fmt.Errorf("lib: %w", err)
if !i.offlineInstall {
err = i.downloadLibrary(ctx)
if err != nil {
return fmt.Errorf("lib: %w", err)
}
}
if _, err := os.Stat(i.LibDir()); os.IsNotExist(err) {
return fmt.Errorf("no local installation found: %w", err)
}
err = i.setupPythonVirtualEnvironment(ctx, w)
if err != nil {


@ -241,6 +241,45 @@ func TestInstallerWorksForReleases(t *testing.T) {
r.RunAndExpectOutput("setting up important infrastructure")
}
func TestOfflineInstallerWorksForReleases(t *testing.T) {
// This command is useful on systems with restricted internet access. The expected setup is:
// install a labs project on a machine that has internet access,
// zip and copy the installation directory to the target machine, and
// run `databricks labs install --offline=true` there.
// The installer then looks for the code in the same install directory and, if present, installs from there.
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/api/2.1/clusters/get" {
respondWithJSON(t, w, &compute.ClusterDetails{
State: compute.StateRunning,
})
return
}
t.Logf("Requested: %s", r.URL.Path)
t.FailNow()
}))
defer server.Close()
ctx := installerContext(t, server)
newHome := copyTestdata(t, "testdata/installed-in-home")
ctx = env.WithUserHomeDir(ctx, newHome)
ctx, stub := process.WithStub(ctx)
stub.WithStdoutFor(`python[\S]+ --version`, "Python 3.10.5")
// on Unix, we call `python3`, but on Windows it is `python.exe`
stub.WithStderrFor(`python[\S]+ -m venv .*/.databricks/labs/blueprint/state/venv`, "[mock venv create]")
stub.WithStderrFor(`python[\S]+ -m pip install --upgrade --upgrade-strategy eager .`, "[mock pip install]")
stub.WithStdoutFor(`python[\S]+ install.py`, "setting up important infrastructure")
// simulate the case of GitHub Actions
ctx = env.Set(ctx, "DATABRICKS_HOST", server.URL)
ctx = env.Set(ctx, "DATABRICKS_TOKEN", "...")
ctx = env.Set(ctx, "DATABRICKS_CLUSTER_ID", "installer-cluster")
ctx = env.Set(ctx, "DATABRICKS_WAREHOUSE_ID", "installer-warehouse")
r := testcli.NewRunner(t, ctx, "labs", "install", "blueprint", "--offline=true", "--debug")
r.RunAndExpectOutput("setting up important infrastructure")
}
func TestInstallerWorksForDevelopment(t *testing.T) {
defer func() {
if !t.Failed() {


@ -307,7 +307,7 @@ func (p *Project) checkUpdates(cmd *cobra.Command) error {
// might not be installed yet
return nil
}
r := github.NewReleaseCache("databrickslabs", p.Name, p.CacheDir())
r := github.NewReleaseCache("databrickslabs", p.Name, p.CacheDir(), false)
versions, err := r.Load(ctx)
if err != nil {
return err


@ -7,6 +7,7 @@ import (
"net/http"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/databrickscfg/profile"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/config"
@ -16,9 +17,7 @@ import (
// Placeholders to use as unique keys in context.Context.
var (
workspaceClient int
accountClient int
configUsed int
accountClient int
)
type ErrNoWorkspaceProfiles struct {
@ -119,7 +118,7 @@ func MustAccountClient(cmd *cobra.Command, args []string) error {
}
ctx := cmd.Context()
ctx = context.WithValue(ctx, &configUsed, cfg)
ctx = command.SetConfigUsed(ctx, cfg)
cmd.SetContext(ctx)
profiler := profile.GetProfiler(ctx)
@ -202,7 +201,7 @@ func MustWorkspaceClient(cmd *cobra.Command, args []string) error {
}
ctx := cmd.Context()
ctx = context.WithValue(ctx, &configUsed, cfg)
ctx = command.SetConfigUsed(ctx, cfg)
cmd.SetContext(ctx)
// Try to load a bundle configuration if we're allowed to by the caller (see `./auth_options.go`).
@ -213,7 +212,7 @@ func MustWorkspaceClient(cmd *cobra.Command, args []string) error {
}
if b != nil {
ctx = context.WithValue(ctx, &configUsed, b.Config.Workspace.Config())
ctx = command.SetConfigUsed(ctx, b.Config.Workspace.Config())
cmd.SetContext(ctx)
client, err := b.WorkspaceClientE()
if err != nil {
@ -229,15 +228,11 @@ func MustWorkspaceClient(cmd *cobra.Command, args []string) error {
return err
}
ctx = context.WithValue(ctx, &workspaceClient, w)
ctx = command.SetWorkspaceClient(ctx, w)
cmd.SetContext(ctx)
return nil
}
func SetWorkspaceClient(ctx context.Context, w *databricks.WorkspaceClient) context.Context {
return context.WithValue(ctx, &workspaceClient, w)
}
func SetAccountClient(ctx context.Context, a *databricks.AccountClient) context.Context {
return context.WithValue(ctx, &accountClient, a)
}
@ -321,14 +316,6 @@ func emptyHttpRequest(ctx context.Context) *http.Request {
return req
}
func WorkspaceClient(ctx context.Context) *databricks.WorkspaceClient {
w, ok := ctx.Value(&workspaceClient).(*databricks.WorkspaceClient)
if !ok {
panic("cannot get *databricks.WorkspaceClient. Please report it as a bug")
}
return w
}
func AccountClient(ctx context.Context) *databricks.AccountClient {
a, ok := ctx.Value(&accountClient).(*databricks.AccountClient)
if !ok {
@ -336,11 +323,3 @@ func AccountClient(ctx context.Context) *databricks.AccountClient {
}
return a
}
func ConfigUsed(ctx context.Context) *config.Config {
cfg, ok := ctx.Value(&configUsed).(*config.Config)
if !ok {
panic("cannot get *config.Config. Please report it as a bug")
}
return cfg
}
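A short sketch of the context round-trip that replaces these root helpers (assuming the libs/command accessors keep the panic-on-missing behavior of the removed functions):

	package example

	import (
		"context"

		"github.com/databricks/cli/libs/command"
		"github.com/databricks/databricks-sdk-go"
		"github.com/databricks/databricks-sdk-go/config"
	)

	func roundTrip(ctx context.Context, w *databricks.WorkspaceClient, cfg *config.Config) {
		// Setters derive a new context carrying the value; getters read it back
		// and are expected to fail loudly when nothing was stored.
		ctx = command.SetWorkspaceClient(ctx, w)
		ctx = command.SetConfigUsed(ctx, cfg)
		_ = command.WorkspaceClient(ctx)
		_ = command.ConfigUsed(ctx)
	}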


@ -9,6 +9,7 @@ import (
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/config"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -263,7 +264,7 @@ func TestMustAnyClientCanCreateWorkspaceClient(t *testing.T) {
require.False(t, isAccount)
require.NoError(t, err)
w := WorkspaceClient(cmd.Context())
w := command.WorkspaceClient(cmd.Context())
require.NotNil(t, w)
}


@ -6,6 +6,7 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/env"
"github.com/databricks/cli/bundle/phases"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/diag"
envlib "github.com/databricks/cli/libs/env"
"github.com/spf13/cobra"
@ -102,7 +103,7 @@ func configureBundle(cmd *cobra.Command, b *bundle.Bundle) (*bundle.Bundle, diag
if err != nil {
return b, diags.Extend(diag.FromErr(err))
}
ctx = context.WithValue(ctx, &configUsed, client.Config)
ctx = command.SetConfigUsed(ctx, client.Config)
cmd.SetContext(ctx)
return b, diags


@ -9,6 +9,7 @@ import (
"testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/command"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -78,7 +79,7 @@ func TestBundleConfigureDefault(t *testing.T) {
err := setupWithHost(t, cmd, "https://x.com")
require.NoError(t, err)
assert.Equal(t, "https://x.com", ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "https://x.com", command.ConfigUsed(cmd.Context()).Host)
}
func TestBundleConfigureWithMultipleMatches(t *testing.T) {
@ -120,8 +121,8 @@ func TestBundleConfigureWithCorrectProfile(t *testing.T) {
err = setupWithHost(t, cmd, "https://a.com")
require.NoError(t, err)
assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "PROFILE-1", ConfigUsed(cmd.Context()).Profile)
assert.Equal(t, "https://a.com", command.ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "PROFILE-1", command.ConfigUsed(cmd.Context()).Profile)
}
func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) {
@ -144,8 +145,8 @@ func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) {
err = setupWithHost(t, cmd, "https://a.com")
require.NoError(t, err)
assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "PROFILE-1", ConfigUsed(cmd.Context()).Profile)
assert.Equal(t, "https://a.com", command.ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "PROFILE-1", command.ConfigUsed(cmd.Context()).Profile)
}
func TestBundleConfigureProfileDefault(t *testing.T) {
@ -156,9 +157,9 @@ func TestBundleConfigureProfileDefault(t *testing.T) {
err := setupWithProfile(t, cmd, "PROFILE-1")
require.NoError(t, err)
assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "a", ConfigUsed(cmd.Context()).Token)
assert.Equal(t, "PROFILE-1", ConfigUsed(cmd.Context()).Profile)
assert.Equal(t, "https://a.com", command.ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "a", command.ConfigUsed(cmd.Context()).Token)
assert.Equal(t, "PROFILE-1", command.ConfigUsed(cmd.Context()).Profile)
}
func TestBundleConfigureProfileFlag(t *testing.T) {
@ -171,9 +172,9 @@ func TestBundleConfigureProfileFlag(t *testing.T) {
err = setupWithProfile(t, cmd, "PROFILE-1")
require.NoError(t, err)
assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "b", ConfigUsed(cmd.Context()).Token)
assert.Equal(t, "PROFILE-2", ConfigUsed(cmd.Context()).Profile)
assert.Equal(t, "https://a.com", command.ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "b", command.ConfigUsed(cmd.Context()).Token)
assert.Equal(t, "PROFILE-2", command.ConfigUsed(cmd.Context()).Profile)
}
func TestBundleConfigureProfileEnvVariable(t *testing.T) {
@ -185,9 +186,9 @@ func TestBundleConfigureProfileEnvVariable(t *testing.T) {
err := setupWithProfile(t, cmd, "PROFILE-1")
require.NoError(t, err)
assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "b", ConfigUsed(cmd.Context()).Token)
assert.Equal(t, "PROFILE-2", ConfigUsed(cmd.Context()).Profile)
assert.Equal(t, "https://a.com", command.ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "b", command.ConfigUsed(cmd.Context()).Token)
assert.Equal(t, "PROFILE-2", command.ConfigUsed(cmd.Context()).Profile)
}
func TestBundleConfigureProfileFlagAndEnvVariable(t *testing.T) {
@ -201,9 +202,9 @@ func TestBundleConfigureProfileFlagAndEnvVariable(t *testing.T) {
err = setupWithProfile(t, cmd, "PROFILE-1")
require.NoError(t, err)
assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "b", ConfigUsed(cmd.Context()).Token)
assert.Equal(t, "PROFILE-2", ConfigUsed(cmd.Context()).Profile)
assert.Equal(t, "https://a.com", command.ConfigUsed(cmd.Context()).Host)
assert.Equal(t, "b", command.ConfigUsed(cmd.Context()).Token)
assert.Equal(t, "PROFILE-2", command.ConfigUsed(cmd.Context()).Profile)
}
func TestTargetFlagFull(t *testing.T) {
@ -212,7 +213,7 @@ func TestTargetFlagFull(t *testing.T) {
cmd.SetArgs([]string{"version", "--target", "development"})
ctx := context.Background()
err := cmd.ExecuteContext(ctx)
err := Execute(ctx, cmd)
assert.NoError(t, err)
assert.Equal(t, "development", GetTarget(cmd))
@ -224,7 +225,7 @@ func TestTargetFlagShort(t *testing.T) {
cmd.SetArgs([]string{"version", "-t", "production"})
ctx := context.Background()
err := cmd.ExecuteContext(ctx)
err := Execute(ctx, cmd)
assert.NoError(t, err)
assert.Equal(t, "production", GetTarget(cmd))
@ -238,7 +239,7 @@ func TestTargetEnvironmentFlag(t *testing.T) {
cmd.SetArgs([]string{"version", "--environment", "development"})
ctx := context.Background()
err := cmd.ExecuteContext(ctx)
err := Execute(ctx, cmd)
assert.NoError(t, err)
assert.Equal(t, "development", GetTarget(cmd))


@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/internal/build"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/dbr"
"github.com/databricks/cli/libs/log"
"github.com/spf13/cobra"
@ -124,6 +125,9 @@ Stack Trace:
%s`, version, r, string(trace))
}()
// Set a command execution ID value in the context
ctx = command.GenerateExecId(ctx)
// Run the command
cmd, err = cmd.ExecuteContextC(ctx)
if err != nil && !errors.Is(err, ErrAlreadyPrinted) {


@ -3,12 +3,12 @@ package root
import (
"context"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/useragent"
"github.com/google/uuid"
)
func withCommandExecIdInUserAgent(ctx context.Context) context.Context {
// A UUID that will allow us to correlate multiple API requests made by
// the same CLI invocation.
return useragent.InContext(ctx, "cmd-exec-id", uuid.New().String())
return useragent.InContext(ctx, "cmd-exec-id", command.ExecId(ctx))
}
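Combined with the Execute hunk above, the correlation flow is as follows (a sketch; command.GenerateExecId is assumed to store a fresh UUID in the context, which the test below exercises):

	ctx := command.GenerateExecId(context.Background()) // once per CLI invocation
	ctx = withCommandExecIdInUserAgent(ctx)
	// Every API request made with this context now carries the same
	// "cmd-exec-id/<uuid>" pair in its User-Agent header.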


@ -2,25 +2,18 @@ package root
import (
"context"
"regexp"
"testing"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/useragent"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestWithCommandExecIdInUserAgent(t *testing.T) {
ctx := withCommandExecIdInUserAgent(context.Background())
ctx := command.GenerateExecId(context.Background())
ctx = withCommandExecIdInUserAgent(ctx)
// Check that the user agent contains a cmd-exec-id/<UUID> entry.
ua := useragent.FromContext(ctx)
re := regexp.MustCompile(`cmd-exec-id/([a-f0-9-]+)`)
matches := re.FindAllStringSubmatch(ua, -1)
// Assert that we have exactly one match and that it's a valid UUID.
require.Len(t, matches, 1)
_, err := uuid.Parse(matches[0][1])
assert.NoError(t, err)
assert.Regexp(t, `cmd-exec-id/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`, ua)
}

cmd/selftest/patchwhl.go (new file)

@ -0,0 +1,24 @@
package selftest
import (
"github.com/databricks/cli/libs/log"
"github.com/databricks/cli/libs/patchwheel"
"github.com/spf13/cobra"
)
func newPatchWhl() *cobra.Command {
return &cobra.Command{
Use: "patchwhl",
Run: func(cmd *cobra.Command, args []string) {
ctx := cmd.Context()
for _, arg := range args {
out, err := patchwheel.PatchWheel(ctx, arg, ".")
if err != nil {
log.Warnf(ctx, "Failed to patch whl: %s: %s", arg, err)
} else {
log.Warnf(ctx, "Patched whl: %s -> %s", arg, out)
}
}
},
}
}


@ -12,5 +12,6 @@ func New() *cobra.Command {
}
cmd.AddCommand(newPanic())
cmd.AddCommand(newPatchWhl())
return cmd
}


@ -12,6 +12,7 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/libs/git"
"github.com/databricks/cli/libs/log"
@ -65,7 +66,7 @@ func (f *syncFlags) syncOptionsFromArgs(cmd *cobra.Command, args []string) (*syn
}
ctx := cmd.Context()
client := root.WorkspaceClient(ctx)
client := command.WorkspaceClient(ctx)
localRoot := vfs.MustNew(args[0])
info, err := git.FetchRepositoryInfo(ctx, localRoot.Native(), client)
@ -186,7 +187,7 @@ func New() *cobra.Command {
case 0:
return nil, cobra.ShellCompDirectiveFilterDirs
case 1:
wsc := root.WorkspaceClient(cmd.Context())
wsc := command.WorkspaceClient(cmd.Context())
return completeRemotePath(cmd.Context(), wsc, toComplete)
default:
return nil, cobra.ShellCompDirectiveNoFileComp


@ -8,7 +8,7 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/vfs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -58,7 +58,7 @@ func TestSyncOptionsFromArgs(t *testing.T) {
f := syncFlags{}
cmd := New()
cmd.SetContext(root.SetWorkspaceClient(context.Background(), nil))
cmd.SetContext(command.SetWorkspaceClient(context.Background(), nil))
opts, err := f.syncOptionsFromArgs(cmd, []string{local, remote})
require.NoError(t, err)
assert.Equal(t, local, opts.LocalRoot.Native())


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/iam"
"github.com/spf13/cobra"
@ -70,7 +71,7 @@ func newCheckPolicy() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := checkPolicyJson.Unmarshal(&checkPolicyReq)


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -73,7 +74,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AibiDashboardEmbeddingAccessPolicy().Delete(ctx, deleteReq)
if err != nil {
@ -130,7 +131,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AibiDashboardEmbeddingAccessPolicy().Get(ctx, getReq)
if err != nil {
@ -180,7 +181,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -73,7 +74,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AibiDashboardEmbeddingApprovedDomains().Delete(ctx, deleteReq)
if err != nil {
@ -128,7 +129,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AibiDashboardEmbeddingApprovedDomains().Get(ctx, getReq)
if err != nil {
@ -180,7 +181,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
@ -90,7 +91,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -160,7 +161,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -232,7 +233,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -299,7 +300,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.AlertsLegacy.List(ctx)
if err != nil {
return err
@ -360,7 +361,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
@ -82,7 +83,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -145,7 +146,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -212,7 +213,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
@ -289,7 +290,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Alerts.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -364,7 +365,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)


@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/spf13/cobra"
@ -114,7 +115,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq.App)
@ -205,7 +206,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Name = args[0]
@ -277,7 +278,7 @@ func newDeploy() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := deployJson.Unmarshal(&deployReq.AppDeployment)
@ -366,7 +367,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Name = args[0]
@ -426,7 +427,7 @@ func newGetDeployment() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getDeploymentReq.AppName = args[0]
getDeploymentReq.DeploymentId = args[1]
@ -485,7 +486,7 @@ func newGetPermissionLevels() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPermissionLevelsReq.AppName = args[0]
@ -544,7 +545,7 @@ func newGetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getPermissionsReq.AppName = args[0]
@ -602,7 +603,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Apps.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -658,7 +659,7 @@ func newListDeployments() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listDeploymentsReq.AppName = args[0]
@ -719,7 +720,7 @@ func newSetPermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := setPermissionsJson.Unmarshal(&setPermissionsReq)
@ -794,7 +795,7 @@ func newStart() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
startReq.Name = args[0]
@ -876,7 +877,7 @@ func newStop() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
stopReq.Name = args[0]
@ -965,7 +966,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq.App)
@ -1040,7 +1041,7 @@ func newUpdatePermissions() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updatePermissionsJson.Unmarshal(&updatePermissionsReq)


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -77,7 +78,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
_, err = fmt.Sscan(args[0], &getReq.ArtifactType)
if err != nil {
@ -142,7 +143,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
@ -70,7 +71,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response, err := w.Settings.AutomaticClusterUpdate().Get(ctx, getReq)
if err != nil {
@ -124,7 +125,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
@ -102,7 +103,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq)
@ -177,7 +178,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Name = args[0]
@ -239,7 +240,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Name = args[0]
@ -302,7 +303,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.Catalogs.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -367,7 +368,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)


@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/cleanrooms"
"github.com/spf13/cobra"
@ -96,7 +97,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq.Asset)
@ -169,7 +170,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.CleanRoomName = args[0]
_, err = fmt.Sscan(args[1], &deleteReq.AssetType)
@ -235,7 +236,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.CleanRoomName = args[0]
_, err = fmt.Sscan(args[1], &getReq.AssetType)
@ -298,7 +299,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listReq.CleanRoomName = args[0]
@ -376,7 +377,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq.Asset)


@ -5,6 +5,7 @@ package clean_room_task_runs
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/databricks-sdk-go/service/cleanrooms"
"github.com/spf13/cobra"
)
@ -74,7 +75,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
listReq.CleanRoomName = args[0]


@ -5,6 +5,7 @@ package clean_rooms
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/command"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/cleanrooms"
"github.com/spf13/cobra"
@ -92,7 +93,7 @@ func newCreate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createJson.Unmarshal(&createReq.CleanRoom)
@ -166,7 +167,7 @@ func newCreateOutputCatalog() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := createOutputCatalogJson.Unmarshal(&createOutputCatalogReq.OutputCatalog)
@ -239,7 +240,7 @@ func newDelete() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
deleteReq.Name = args[0]
@ -294,7 +295,7 @@ func newGet() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
getReq.Name = args[0]
@ -353,7 +354,7 @@ func newList() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
response := w.CleanRooms.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
@ -413,7 +414,7 @@ func newUpdate() *cobra.Command {
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
w := command.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)

Some files were not shown because too many files have changed in this diff.