Generate acceptance tests for all CLI commands

Andrew Nester 2025-01-29 11:56:31 +00:00
parent 884b5f26ed
commit e93f1ed8e8
275 changed files with 17637 additions and 3 deletions

.codegen.json
View File

@@ -1,15 +1,19 @@
 {
-  "formatter": "go run golang.org/x/tools/cmd/goimports@latest -w $FILENAMES && go fmt ./...",
+  "formatter": "go run golang.org/x/tools/cmd/goimports@latest -w cmd && go fmt ./...",
   "services": {
-    ".codegen/service.go.tmpl": "cmd/{{if .IsAccounts}}account{{else}}workspace{{end}}/{{(.TrimPrefix \"account\").KebabName}}/{{(.TrimPrefix \"account\").KebabName}}.go"
+    ".codegen/service.go.tmpl": "cmd/{{if .IsAccounts}}account{{else}}workspace{{end}}/{{(.TrimPrefix \"account\").KebabName}}/{{(.TrimPrefix \"account\").KebabName}}.go",
+    ".codegen/cmd_script.tmpl": "acceptance/help/cmd/{{if .IsAccounts}}account{{else}}workspace{{end}}/{{(.TrimPrefix \"account\").KebabName}}/{{(.TrimPrefix \"account\").KebabName}}/script"
   },
   "batch": {
     ".codegen/cmds-workspace.go.tmpl": "cmd/workspace/cmd.go",
     ".codegen/cmds-account.go.tmpl": "cmd/account/cmd.go"
   },
   "toolchain": {
-    "required": ["go"],
+    "required": [
+      "go"
+    ],
     "post_generate": [
+      "go test ./acceptance -v -update -run 'TestAccept/help/cmd'",
       "go test -timeout 240s -run TestConsistentDatabricksSdkVersion github.com/databricks/cli/internal/build",
       "make schema",
       "echo 'bundle/internal/tf/schema/\\*.go linguist-generated=true' >> ./.gitattributes",

6
.codegen/cmd_script.tmpl Normal file
View File

@@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
{{- $isAccount := .IsAccounts }}
{{- $cmdGrp := (.TrimPrefix "account").KebabName }}
{{- range .Methods}}
trace $CLI {{if $isAccount }}account {{end}}{{$cmdGrp}} {{.KebabName}} --help
{{- end}}
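Each rendered script drives a trace helper supplied by the acceptance test harness. A hypothetical sketch of such a helper, inferred from the ">>> $CLI ..." markers in the recorded outputs below (the harness's actual definition may differ):

    # Hypothetical: print the command with a ">>> " marker, then run it,
    # so the recorded output interleaves each invocation with its help text.
    trace() {
        echo ">>> $*"
        "$@"
    }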

136
.gitattributes vendored
View File

@@ -1,3 +1,139 @@
acceptance/help/cmd/account/access-control/access-control/script linguist-generated=true
acceptance/help/cmd/account/billable-usage/billable-usage/script linguist-generated=true
acceptance/help/cmd/account/budgets/budgets/script linguist-generated=true
acceptance/help/cmd/account/credentials/credentials/script linguist-generated=true
acceptance/help/cmd/account/csp-enablement-account/csp-enablement-account/script linguist-generated=true
acceptance/help/cmd/account/custom-app-integration/custom-app-integration/script linguist-generated=true
acceptance/help/cmd/account/disable-legacy-features/disable-legacy-features/script linguist-generated=true
acceptance/help/cmd/account/encryption-keys/encryption-keys/script linguist-generated=true
acceptance/help/cmd/account/esm-enablement-account/esm-enablement-account/script linguist-generated=true
acceptance/help/cmd/account/federation-policy/federation-policy/script linguist-generated=true
acceptance/help/cmd/account/groups/groups/script linguist-generated=true
acceptance/help/cmd/account/ip-access-lists/ip-access-lists/script linguist-generated=true
acceptance/help/cmd/account/log-delivery/log-delivery/script linguist-generated=true
acceptance/help/cmd/account/metastore-assignments/metastore-assignments/script linguist-generated=true
acceptance/help/cmd/account/metastores/metastores/script linguist-generated=true
acceptance/help/cmd/account/network-connectivity/network-connectivity/script linguist-generated=true
acceptance/help/cmd/account/networks/networks/script linguist-generated=true
acceptance/help/cmd/account/o-auth-published-apps/o-auth-published-apps/script linguist-generated=true
acceptance/help/cmd/account/personal-compute/personal-compute/script linguist-generated=true
acceptance/help/cmd/account/private-access/private-access/script linguist-generated=true
acceptance/help/cmd/account/published-app-integration/published-app-integration/script linguist-generated=true
acceptance/help/cmd/account/service-principal-federation-policy/service-principal-federation-policy/script linguist-generated=true
acceptance/help/cmd/account/service-principal-secrets/service-principal-secrets/script linguist-generated=true
acceptance/help/cmd/account/service-principals/service-principals/script linguist-generated=true
acceptance/help/cmd/account/settings/settings/script linguist-generated=true
acceptance/help/cmd/account/storage-credentials/storage-credentials/script linguist-generated=true
acceptance/help/cmd/account/storage/storage/script linguist-generated=true
acceptance/help/cmd/account/usage-dashboards/usage-dashboards/script linguist-generated=true
acceptance/help/cmd/account/users/users/script linguist-generated=true
acceptance/help/cmd/account/vpc-endpoints/vpc-endpoints/script linguist-generated=true
acceptance/help/cmd/account/workspace-assignment/workspace-assignment/script linguist-generated=true
acceptance/help/cmd/account/workspaces/workspaces/script linguist-generated=true
acceptance/help/cmd/workspace/access-control-proxy/access-control-proxy/script linguist-generated=true
acceptance/help/cmd/workspace/access-control/access-control/script linguist-generated=true
acceptance/help/cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy/script linguist-generated=true
acceptance/help/cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains/script linguist-generated=true
acceptance/help/cmd/workspace/alerts-legacy/alerts-legacy/script linguist-generated=true
acceptance/help/cmd/workspace/alerts/alerts/script linguist-generated=true
acceptance/help/cmd/workspace/apps/apps/script linguist-generated=true
acceptance/help/cmd/workspace/artifact-allowlists/artifact-allowlists/script linguist-generated=true
acceptance/help/cmd/workspace/automatic-cluster-update/automatic-cluster-update/script linguist-generated=true
acceptance/help/cmd/workspace/catalogs/catalogs/script linguist-generated=true
acceptance/help/cmd/workspace/clean-room-assets/clean-room-assets/script linguist-generated=true
acceptance/help/cmd/workspace/clean-room-task-runs/clean-room-task-runs/script linguist-generated=true
acceptance/help/cmd/workspace/clean-rooms/clean-rooms/script linguist-generated=true
acceptance/help/cmd/workspace/cluster-policies/cluster-policies/script linguist-generated=true
acceptance/help/cmd/workspace/clusters/clusters/script linguist-generated=true
acceptance/help/cmd/workspace/command-execution/command-execution/script linguist-generated=true
acceptance/help/cmd/workspace/compliance-security-profile/compliance-security-profile/script linguist-generated=true
acceptance/help/cmd/workspace/connections/connections/script linguist-generated=true
acceptance/help/cmd/workspace/consumer-fulfillments/consumer-fulfillments/script linguist-generated=true
acceptance/help/cmd/workspace/consumer-installations/consumer-installations/script linguist-generated=true
acceptance/help/cmd/workspace/consumer-listings/consumer-listings/script linguist-generated=true
acceptance/help/cmd/workspace/consumer-personalization-requests/consumer-personalization-requests/script linguist-generated=true
acceptance/help/cmd/workspace/consumer-providers/consumer-providers/script linguist-generated=true
acceptance/help/cmd/workspace/credentials-manager/credentials-manager/script linguist-generated=true
acceptance/help/cmd/workspace/credentials/credentials/script linguist-generated=true
acceptance/help/cmd/workspace/current-user/current-user/script linguist-generated=true
acceptance/help/cmd/workspace/dashboard-widgets/dashboard-widgets/script linguist-generated=true
acceptance/help/cmd/workspace/dashboards/dashboards/script linguist-generated=true
acceptance/help/cmd/workspace/data-sources/data-sources/script linguist-generated=true
acceptance/help/cmd/workspace/dbfs/dbfs/script linguist-generated=true
acceptance/help/cmd/workspace/dbsql-permissions/dbsql-permissions/script linguist-generated=true
acceptance/help/cmd/workspace/default-namespace/default-namespace/script linguist-generated=true
acceptance/help/cmd/workspace/disable-legacy-access/disable-legacy-access/script linguist-generated=true
acceptance/help/cmd/workspace/disable-legacy-dbfs/disable-legacy-dbfs/script linguist-generated=true
acceptance/help/cmd/workspace/enhanced-security-monitoring/enhanced-security-monitoring/script linguist-generated=true
acceptance/help/cmd/workspace/experiments/experiments/script linguist-generated=true
acceptance/help/cmd/workspace/external-locations/external-locations/script linguist-generated=true
acceptance/help/cmd/workspace/files/files/script linguist-generated=true
acceptance/help/cmd/workspace/functions/functions/script linguist-generated=true
acceptance/help/cmd/workspace/genie/genie/script linguist-generated=true
acceptance/help/cmd/workspace/git-credentials/git-credentials/script linguist-generated=true
acceptance/help/cmd/workspace/global-init-scripts/global-init-scripts/script linguist-generated=true
acceptance/help/cmd/workspace/grants/grants/script linguist-generated=true
acceptance/help/cmd/workspace/groups/groups/script linguist-generated=true
acceptance/help/cmd/workspace/instance-pools/instance-pools/script linguist-generated=true
acceptance/help/cmd/workspace/instance-profiles/instance-profiles/script linguist-generated=true
acceptance/help/cmd/workspace/ip-access-lists/ip-access-lists/script linguist-generated=true
acceptance/help/cmd/workspace/jobs/jobs/script linguist-generated=true
acceptance/help/cmd/workspace/lakeview/lakeview/script linguist-generated=true
acceptance/help/cmd/workspace/libraries/libraries/script linguist-generated=true
acceptance/help/cmd/workspace/metastores/metastores/script linguist-generated=true
acceptance/help/cmd/workspace/model-registry/model-registry/script linguist-generated=true
acceptance/help/cmd/workspace/model-versions/model-versions/script linguist-generated=true
acceptance/help/cmd/workspace/notification-destinations/notification-destinations/script linguist-generated=true
acceptance/help/cmd/workspace/online-tables/online-tables/script linguist-generated=true
acceptance/help/cmd/workspace/permission-migration/permission-migration/script linguist-generated=true
acceptance/help/cmd/workspace/permissions/permissions/script linguist-generated=true
acceptance/help/cmd/workspace/pipelines/pipelines/script linguist-generated=true
acceptance/help/cmd/workspace/policy-compliance-for-clusters/policy-compliance-for-clusters/script linguist-generated=true
acceptance/help/cmd/workspace/policy-compliance-for-jobs/policy-compliance-for-jobs/script linguist-generated=true
acceptance/help/cmd/workspace/policy-families/policy-families/script linguist-generated=true
acceptance/help/cmd/workspace/provider-exchange-filters/provider-exchange-filters/script linguist-generated=true
acceptance/help/cmd/workspace/provider-exchanges/provider-exchanges/script linguist-generated=true
acceptance/help/cmd/workspace/provider-files/provider-files/script linguist-generated=true
acceptance/help/cmd/workspace/provider-listings/provider-listings/script linguist-generated=true
acceptance/help/cmd/workspace/provider-personalization-requests/provider-personalization-requests/script linguist-generated=true
acceptance/help/cmd/workspace/provider-provider-analytics-dashboards/provider-provider-analytics-dashboards/script linguist-generated=true
acceptance/help/cmd/workspace/provider-providers/provider-providers/script linguist-generated=true
acceptance/help/cmd/workspace/providers/providers/script linguist-generated=true
acceptance/help/cmd/workspace/quality-monitors/quality-monitors/script linguist-generated=true
acceptance/help/cmd/workspace/queries-legacy/queries-legacy/script linguist-generated=true
acceptance/help/cmd/workspace/queries/queries/script linguist-generated=true
acceptance/help/cmd/workspace/query-history/query-history/script linguist-generated=true
acceptance/help/cmd/workspace/query-visualizations-legacy/query-visualizations-legacy/script linguist-generated=true
acceptance/help/cmd/workspace/query-visualizations/query-visualizations/script linguist-generated=true
acceptance/help/cmd/workspace/recipient-activation/recipient-activation/script linguist-generated=true
acceptance/help/cmd/workspace/recipients/recipients/script linguist-generated=true
acceptance/help/cmd/workspace/registered-models/registered-models/script linguist-generated=true
acceptance/help/cmd/workspace/repos/repos/script linguist-generated=true
acceptance/help/cmd/workspace/resource-quotas/resource-quotas/script linguist-generated=true
acceptance/help/cmd/workspace/restrict-workspace-admins/restrict-workspace-admins/script linguist-generated=true
acceptance/help/cmd/workspace/schemas/schemas/script linguist-generated=true
acceptance/help/cmd/workspace/secrets/secrets/script linguist-generated=true
acceptance/help/cmd/workspace/service-principals/service-principals/script linguist-generated=true
acceptance/help/cmd/workspace/serving-endpoints-data-plane/serving-endpoints-data-plane/script linguist-generated=true
acceptance/help/cmd/workspace/serving-endpoints/serving-endpoints/script linguist-generated=true
acceptance/help/cmd/workspace/settings/settings/script linguist-generated=true
acceptance/help/cmd/workspace/shares/shares/script linguist-generated=true
acceptance/help/cmd/workspace/statement-execution/statement-execution/script linguist-generated=true
acceptance/help/cmd/workspace/storage-credentials/storage-credentials/script linguist-generated=true
acceptance/help/cmd/workspace/system-schemas/system-schemas/script linguist-generated=true
acceptance/help/cmd/workspace/table-constraints/table-constraints/script linguist-generated=true
acceptance/help/cmd/workspace/tables/tables/script linguist-generated=true
acceptance/help/cmd/workspace/temporary-table-credentials/temporary-table-credentials/script linguist-generated=true
acceptance/help/cmd/workspace/token-management/token-management/script linguist-generated=true
acceptance/help/cmd/workspace/tokens/tokens/script linguist-generated=true
acceptance/help/cmd/workspace/users/users/script linguist-generated=true
acceptance/help/cmd/workspace/vector-search-endpoints/vector-search-endpoints/script linguist-generated=true
acceptance/help/cmd/workspace/vector-search-indexes/vector-search-indexes/script linguist-generated=true
acceptance/help/cmd/workspace/volumes/volumes/script linguist-generated=true
acceptance/help/cmd/workspace/warehouses/warehouses/script linguist-generated=true
acceptance/help/cmd/workspace/workspace-bindings/workspace-bindings/script linguist-generated=true
acceptance/help/cmd/workspace/workspace-conf/workspace-conf/script linguist-generated=true
acceptance/help/cmd/workspace/workspace/workspace/script linguist-generated=true
cmd/account/access-control/access-control.go linguist-generated=true
cmd/account/billable-usage/billable-usage.go linguist-generated=true
cmd/account/budgets/budgets.go linguist-generated=true

72
acceptance/help/cmd/account/access-control/access-control/output.txt Normal file
View File

@@ -0,0 +1,72 @@
>>> $CLI account access-control get-assignable-roles-for-resource --help
Get assignable roles for a resource.
Gets all the roles that can be granted on an account level resource. A role is
grantable if the rule set on the resource can contain an access rule of the
role.
Arguments:
RESOURCE: The resource name for which assignable roles will be listed.
Usage:
databricks account access-control get-assignable-roles-for-resource RESOURCE [flags]
Flags:
-h, --help help for get-assignable-roles-for-resource
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account access-control get-rule-set --help
Get a rule set.
Get a rule set by its name. A rule set is always attached to a resource and
contains a list of access rules on the said resource. Currently only a default
rule set for each resource is supported.
Arguments:
NAME: The ruleset name associated with the request.
ETAG: Etag used for versioning. The response is at least as fresh as the eTag
provided. Etag is used for optimistic concurrency control as a way to help
prevent simultaneous updates of a rule set from overwriting each other. It
is strongly suggested that systems make use of the etag in the read ->
modify -> write pattern to perform rule set updates in order to avoid race
conditions that is get an etag from a GET rule set request, and pass it
with the PUT update request to identify the rule set version you are
updating.
Usage:
databricks account access-control get-rule-set NAME ETAG [flags]
Flags:
-h, --help help for get-rule-set
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account access-control update-rule-set --help
Update a rule set.
Replace the rules of a rule set. First, use get to read the current version of
the rule set before modifying it. This pattern helps prevent conflicts between
concurrent updates.
Usage:
databricks account access-control update-rule-set [flags]
Flags:
-h, --help help for update-rule-set
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

4
acceptance/help/cmd/account/access-control/access-control/script Normal file
View File

@@ -0,0 +1,4 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account access-control get-assignable-roles-for-resource --help
trace $CLI account access-control get-rule-set --help
trace $CLI account access-control update-rule-set --help

33
acceptance/help/cmd/account/billable-usage/billable-usage/output.txt Normal file
View File

@@ -0,0 +1,33 @@
>>> $CLI account billable-usage download --help
Return billable usage logs.
Returns billable usage logs in CSV format for the specified account and date
range. For the data schema, see [CSV file schema]. Note that this method might
take multiple minutes to complete.
**Warning**: Depending on the queried date range, the number of workspaces in
the account, the size of the response and the internet speed of the caller,
this API may hit a timeout after a few minutes. If you experience this, try to
mitigate by calling the API with narrower date ranges.
[CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
Arguments:
START_MONTH: Format: YYYY-MM. First month to return billable usage logs for. This
field is required.
END_MONTH: Format: YYYY-MM. Last month to return billable usage logs for. This
field is required.
Usage:
databricks account billable-usage download START_MONTH END_MONTH [flags]
Flags:
-h, --help help for download
--personal-data Specify whether to include personally identifiable information in the billable usage logs, for example the email addresses of cluster creators.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

2
acceptance/help/cmd/account/billable-usage/billable-usage/script Normal file
View File

@@ -0,0 +1,2 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account billable-usage download --help

101
acceptance/help/cmd/account/budgets/budgets/output.txt Normal file
View File

@@ -0,0 +1,101 @@
>>> $CLI account budgets create --help
Create new budget.
Create a new budget configuration for an account. For full details, see
https://docs.databricks.com/en/admin/account-settings/budgets.html.
Usage:
databricks account budgets create [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account budgets delete --help
Delete budget.
Deletes a budget configuration for an account. Both account and budget
configuration are specified by ID. This cannot be undone.
Arguments:
BUDGET_ID: The Databricks budget configuration ID.
Usage:
databricks account budgets delete BUDGET_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account budgets get --help
Get budget.
Gets a budget configuration for an account. Both account and budget
configuration are specified by ID.
Arguments:
BUDGET_ID: The budget configuration ID
Usage:
databricks account budgets get BUDGET_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account budgets list --help
Get all budgets.
Gets all budgets associated with this account.
Usage:
databricks account budgets list [flags]
Flags:
-h, --help help for list
--page-token string A page token received from a previous get all budget configurations call.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account budgets update --help
Modify budget.
Updates a budget configuration for an account. Both account and budget
configuration are specified by ID.
Arguments:
BUDGET_ID: The Databricks budget configuration ID.
Usage:
databricks account budgets update BUDGET_ID [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

6
acceptance/help/cmd/account/budgets/budgets/script Normal file
View File

@@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account budgets create --help
trace $CLI account budgets delete --help
trace $CLI account budgets get --help
trace $CLI account budgets list --help
trace $CLI account budgets update --help

92
acceptance/help/cmd/account/credentials/credentials/output.txt Normal file
View File

@@ -0,0 +1,92 @@
>>> $CLI account credentials create --help
Create credential configuration.
Creates a Databricks credential configuration that represents cloud
cross-account credentials for a specified account. Databricks uses this to set
up network infrastructure properly to host Databricks clusters. For your AWS
IAM role, you need to trust the External ID (the Databricks Account API
account ID) in the returned credential object, and configure the required
access policy.
Save the response's credentials_id field, which is the ID for your new
credential configuration object.
For information about how to create a new workspace with this API, see [Create
a new workspace using the Account API]
[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
Usage:
databricks account credentials create [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account credentials delete --help
Delete credential configuration.
Deletes a Databricks credential configuration object for an account, both
specified by ID. You cannot delete a credential that is associated with any
workspace.
Arguments:
CREDENTIALS_ID: Databricks Account API credential configuration ID
Usage:
databricks account credentials delete CREDENTIALS_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account credentials get --help
Get credential configuration.
Gets a Databricks credential configuration object for an account, both
specified by ID.
Arguments:
CREDENTIALS_ID: Databricks Account API credential configuration ID
Usage:
databricks account credentials get CREDENTIALS_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account credentials list --help
Get all credential configurations.
Gets all Databricks credential configurations associated with an account
specified by ID.
Usage:
databricks account credentials list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

5
acceptance/help/cmd/account/credentials/credentials/script Normal file
View File

@@ -0,0 +1,5 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account credentials create --help
trace $CLI account credentials delete --help
trace $CLI account credentials get --help
trace $CLI account credentials list --help

110
acceptance/help/cmd/account/csp-enablement-account/csp-enablement-account/output.txt Normal file
View File

@@ -0,0 +1,110 @@
>>> $CLI account csp-enablement-account get --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.
>>> $CLI account csp-enablement-account update --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.

3
acceptance/help/cmd/account/csp-enablement-account/csp-enablement-account/script Normal file
View File

@@ -0,0 +1,3 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account csp-enablement-account get --help
trace $CLI account csp-enablement-account update --help

101
acceptance/help/cmd/account/custom-app-integration/custom-app-integration/output.txt Normal file
View File

@@ -0,0 +1,101 @@
>>> $CLI account custom-app-integration create --help
Create Custom OAuth App Integration.
Create Custom OAuth App Integration.
You can retrieve the custom OAuth app integration via
:method:CustomAppIntegration/get.
Usage:
databricks account custom-app-integration create [flags]
Flags:
--confidential This field indicates whether an OAuth client secret is required to authenticate this client.
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--name string Name of the custom OAuth app.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account custom-app-integration delete --help
Delete Custom OAuth App Integration.
Delete an existing Custom OAuth App Integration. You can retrieve the custom
OAuth app integration via :method:CustomAppIntegration/get.
Usage:
databricks account custom-app-integration delete INTEGRATION_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account custom-app-integration get --help
Get OAuth Custom App Integration.
Gets the Custom OAuth App Integration for the given integration id.
Arguments:
INTEGRATION_ID: The OAuth app integration ID.
Usage:
databricks account custom-app-integration get INTEGRATION_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account custom-app-integration list --help
Get custom oauth app integrations.
Get the list of custom OAuth app integrations for the specified Databricks
account
Usage:
databricks account custom-app-integration list [flags]
Flags:
-h, --help help for list
--include-creator-username
--page-size int
--page-token string
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account custom-app-integration update --help
Updates Custom OAuth App Integration.
Updates an existing custom OAuth App Integration. You can retrieve the custom
OAuth app integration via :method:CustomAppIntegration/get.
Usage:
databricks account custom-app-integration update INTEGRATION_ID [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

6
acceptance/help/cmd/account/custom-app-integration/custom-app-integration/script Normal file
View File

@@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account custom-app-integration create --help
trace $CLI account custom-app-integration delete --help
trace $CLI account custom-app-integration get --help
trace $CLI account custom-app-integration list --help
trace $CLI account custom-app-integration update --help

165
acceptance/help/cmd/account/disable-legacy-features/disable-legacy-features/output.txt Normal file
View File

@@ -0,0 +1,165 @@
>>> $CLI account disable-legacy-features delete --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.
>>> $CLI account disable-legacy-features get --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.
>>> $CLI account disable-legacy-features update --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.

4
acceptance/help/cmd/account/disable-legacy-features/disable-legacy-features/script Normal file
View File

@@ -0,0 +1,4 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account disable-legacy-features delete --help
trace $CLI account disable-legacy-features get --help
trace $CLI account disable-legacy-features update --help

118
acceptance/help/cmd/account/encryption-keys/encryption-keys/output.txt Normal file
View File

@@ -0,0 +1,118 @@
>>> $CLI account encryption-keys create --help
Create encryption key configuration.
Creates a customer-managed key configuration object for an account, specified
by ID. This operation uploads a reference to a customer-managed key to
Databricks. If the key is assigned as a workspace's customer-managed key for
managed services, Databricks uses the key to encrypt the workspaces notebooks
and secrets in the control plane, in addition to Databricks SQL queries and
query history. If it is specified as a workspace's customer-managed key for
workspace storage, the key encrypts the workspace's root S3 bucket (which
contains the workspace's root DBFS and system data) and, optionally, cluster
EBS volume data.
**Important**: Customer-managed keys are supported only for some deployment
types, subscription types, and AWS regions that currently support creation of
Databricks workspaces.
This operation is available only if your account is on the E2 version of the
platform or on a select custom plan that allows multiple workspaces per
account.
Usage:
databricks account encryption-keys create [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account encryption-keys delete --help
Delete encryption key configuration.
Deletes a customer-managed key configuration object for an account. You cannot
delete a configuration that is associated with a running workspace.
Arguments:
CUSTOMER_MANAGED_KEY_ID: Databricks encryption key configuration ID.
Usage:
databricks account encryption-keys delete CUSTOMER_MANAGED_KEY_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account encryption-keys get --help
Get encryption key configuration.
Gets a customer-managed key configuration object for an account, specified by
ID. This operation uploads a reference to a customer-managed key to
Databricks. If assigned as a workspace's customer-managed key for managed
services, Databricks uses the key to encrypt the workspaces notebooks and
secrets in the control plane, in addition to Databricks SQL queries and query
history. If it is specified as a workspace's customer-managed key for storage,
the key encrypts the workspace's root S3 bucket (which contains the
workspace's root DBFS and system data) and, optionally, cluster EBS volume
data.
**Important**: Customer-managed keys are supported only for some deployment
types, subscription types, and AWS regions.
This operation is available only if your account is on the E2 version of the
platform.",
Arguments:
CUSTOMER_MANAGED_KEY_ID: Databricks encryption key configuration ID.
Usage:
databricks account encryption-keys get CUSTOMER_MANAGED_KEY_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account encryption-keys list --help
Get all encryption key configurations.
Gets all customer-managed key configuration objects for an account. If the key
is specified as a workspace's managed services customer-managed key,
Databricks uses the key to encrypt the workspace's notebooks and secrets in
the control plane, in addition to Databricks SQL queries and query history. If
the key is specified as a workspace's storage customer-managed key, the key is
used to encrypt the workspace's root S3 bucket and optionally can encrypt
cluster EBS volumes data in the data plane.
**Important**: Customer-managed keys are supported only for some deployment
types, subscription types, and AWS regions.
This operation is available only if your account is on the E2 version of the
platform.
Usage:
databricks account encryption-keys list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

5
acceptance/help/cmd/account/encryption-keys/encryption-keys/script Normal file
View File

@@ -0,0 +1,5 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account encryption-keys create --help
trace $CLI account encryption-keys delete --help
trace $CLI account encryption-keys get --help
trace $CLI account encryption-keys list --help

110
acceptance/help/cmd/account/esm-enablement-account/esm-enablement-account/output.txt Normal file
View File

@@ -0,0 +1,110 @@
>>> $CLI account esm-enablement-account get --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.
>>> $CLI account esm-enablement-account update --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.
View File
@ -0,0 +1,3 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account esm-enablement-account get --help
trace $CLI account esm-enablement-account update --help
View File
@ -0,0 +1,94 @@
>>> $CLI account federation-policy create --help
Create account federation policy.
Usage:
databricks account federation-policy create [flags]
Flags:
--description string Description of the federation policy.
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--name string Resource name for the federation policy.
--policy-id string The identifier for the federation policy.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account federation-policy delete --help
Delete account federation policy.
Arguments:
POLICY_ID: The identifier for the federation policy.
Usage:
databricks account federation-policy delete POLICY_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account federation-policy get --help
Get account federation policy.
Arguments:
POLICY_ID: The identifier for the federation policy.
Usage:
databricks account federation-policy get POLICY_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account federation-policy list --help
List account federation policies.
Usage:
databricks account federation-policy list [flags]
Flags:
-h, --help help for list
--page-size int
--page-token string
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account federation-policy update --help
Update account federation policy.
Arguments:
POLICY_ID: The identifier for the federation policy.
Usage:
databricks account federation-policy update POLICY_ID [flags]
Flags:
--description string Description of the federation policy.
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--name string Resource name for the federation policy.
--update-mask string The field mask specifies which fields of the policy to update.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
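Taken together, the flags above support a create-then-update flow. A minimal sketch, assuming a configured account profile; the policy name, policy ID, and descriptions below are placeholders, not values from this commit:

# Hypothetical values for illustration only.
$CLI account federation-policy create --name my-policy --description "OIDC trust policy"
# Update only the description, selecting the field via --update-mask.
$CLI account federation-policy update <policy-id> --description "rotated" --update-mask description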
View File
@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account federation-policy create --help
trace $CLI account federation-policy delete --help
trace $CLI account federation-policy get --help
trace $CLI account federation-policy list --help
trace $CLI account federation-policy update --help
View File
@ -0,0 +1,131 @@
>>> $CLI account groups create --help
Create a new group.
Creates a group in the Databricks account with a unique name, using the
supplied group details.
Usage:
databricks account groups create [flags]
Flags:
--display-name string String that represents a human-readable group name.
--external-id string
-h, --help help for create
--id string Databricks group ID.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account groups delete --help
Delete a group.
Deletes a group from the Databricks account.
Arguments:
ID: Unique ID for a group in the Databricks account.
Usage:
databricks account groups delete ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account groups get --help
Get group details.
Gets the information for a specific group in the Databricks account.
Arguments:
ID: Unique ID for a group in the Databricks account.
Usage:
databricks account groups get ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account groups list --help
List group details.
Gets all details of the groups associated with the Databricks account.
Usage:
databricks account groups list [flags]
Flags:
--attributes string Comma-separated list of attributes to return in response.
--count int Desired number of results per page.
--excluded-attributes string Comma-separated list of attributes to exclude in response.
--filter string Query by which the results have to be filtered.
-h, --help help for list
--sort-by string Attribute to sort the results.
--sort-order ListSortOrder The order to sort the results. Supported values: [ascending, descending]
--start-index int Specifies the index of the first result.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account groups patch --help
Update group details.
Partially updates the details of a group.
Arguments:
ID: Unique ID for a group in the Databricks account.
Usage:
databricks account groups patch ID [flags]
Flags:
-h, --help help for patch
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account groups update --help
Replace a group.
Updates the details of a group by replacing the entire group entity.
Arguments:
ID: Databricks group ID
Usage:
databricks account groups update ID [flags]
Flags:
--display-name string String that represents a human-readable group name.
--external-id string
-h, --help help for update
--id string Databricks group ID.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
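A usage sketch of the create/patch pair above; the group name, group ID, and SCIM PatchOp body are assumptions for illustration, not values from this diff:

# Hypothetical example: create a group, then rename it with a standard SCIM PatchOp body.
$CLI account groups create --display-name "data-engineers"
$CLI account groups patch <group-id> --json '{"schemas":["urn:ietf:params:scim:api:messages:2.0:PatchOp"],"Operations":[{"op":"replace","path":"displayName","value":"data-eng"}]}'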
View File
@ -0,0 +1,7 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account groups create --help
trace $CLI account groups delete --help
trace $CLI account groups get --help
trace $CLI account groups list --help
trace $CLI account groups patch --help
trace $CLI account groups update --help
View File
@ -0,0 +1,172 @@
>>> $CLI account ip-access-lists create --help
Create access list.
Creates an IP access list for the account.
A list can be an allow list or a block list. See the top of this file for a
description of how the server treats allow lists and block lists at runtime.
When creating or updating an IP access list:
* For all allow lists and block lists combined, the API supports a maximum of
1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to
exceed that number return error 400 with error_code value QUOTA_EXCEEDED.
* If the new list would block the calling user's current IP, error 400 is
returned with error_code value INVALID_STATE.
It can take a few minutes for the changes to take effect.
Arguments:
LABEL: Label for the IP access list. This **cannot** be empty.
LIST_TYPE: Type of IP access list. Valid values are as follows and are
case-sensitive:
* ALLOW: An allow list. Include this IP or range. * BLOCK: A block
list. Exclude this IP or range. IP addresses in the block list are
excluded even if they are included in an allow list.
Usage:
databricks account ip-access-lists create LABEL LIST_TYPE [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account ip-access-lists delete --help
Delete access list.
Deletes an IP access list, specified by its list ID.
Arguments:
IP_ACCESS_LIST_ID: The ID for the corresponding IP access list
Usage:
databricks account ip-access-lists delete IP_ACCESS_LIST_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account ip-access-lists get --help
Get IP access list.
Gets an IP access list, specified by its list ID.
Arguments:
IP_ACCESS_LIST_ID: The ID for the corresponding IP access list
Usage:
databricks account ip-access-lists get IP_ACCESS_LIST_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account ip-access-lists list --help
Get access lists.
Gets all IP access lists for the specified account.
Usage:
databricks account ip-access-lists list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account ip-access-lists replace --help
Replace access list.
Replaces an IP access list, specified by its ID.
A list can include allow lists and block lists. See the top of this file for a
description of how the server treats allow lists and block lists at run time.
When replacing an IP access list: * For all allow lists and block lists
combined, the API supports a maximum of 1000 IP/CIDR values, where one CIDR
counts as a single value. Attempts to exceed that number return error 400 with
error_code value QUOTA_EXCEEDED. * If the resulting list would block the
calling user's current IP, error 400 is returned with error_code value
INVALID_STATE. It can take a few minutes for the changes to take effect.
Arguments:
IP_ACCESS_LIST_ID: The ID for the corresponding IP access list
LABEL: Label for the IP access list. This **cannot** be empty.
LIST_TYPE: Type of IP access list. Valid values are as follows and are
case-sensitive:
* ALLOW: An allow list. Include this IP or range. * BLOCK: A block
list. Exclude this IP or range. IP addresses in the block list are
excluded even if they are included in an allow list.
ENABLED: Specifies whether this IP access list is enabled.
Usage:
databricks account ip-access-lists replace IP_ACCESS_LIST_ID LABEL LIST_TYPE ENABLED [flags]
Flags:
-h, --help help for replace
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account ip-access-lists update --help
Update access list.
Updates an existing IP access list, specified by its ID.
A list can include allow lists and block lists. See the top of this file for a
description of how the server treats allow lists and block lists at run time.
When updating an IP access list:
* For all allow lists and block lists combined, the API supports a maximum of
1000 IP/CIDR values, where one CIDR counts as a single value. Attempts to
exceed that number return error 400 with error_code value QUOTA_EXCEEDED.
* If the updated list would block the calling user's current IP, error 400 is
returned with error_code value INVALID_STATE.
It can take a few minutes for the changes to take effect.
Arguments:
IP_ACCESS_LIST_ID: The ID for the corresponding IP access list
Usage:
databricks account ip-access-lists update IP_ACCESS_LIST_ID [flags]
Flags:
--enabled Specifies whether this IP access list is enabled.
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--label string Label for the IP access list.
--list-type ListType Type of IP access list. Supported values: [ALLOW, BLOCK]
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
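A minimal sketch combining the positional arguments and --json body described above; the label, CIDR range, and list ID are placeholders, and ip_addresses as the request field name is an assumption:

# Hypothetical example: create an allow list, then disable it without deleting it.
$CLI account ip-access-lists create office-vpn ALLOW --json '{"ip_addresses":["192.168.100.0/22"]}'
$CLI account ip-access-lists update <ip-access-list-id> --enabled=false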
View File
@ -0,0 +1,7 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account ip-access-lists create --help
trace $CLI account ip-access-lists delete --help
trace $CLI account ip-access-lists get --help
trace $CLI account ip-access-lists list --help
trace $CLI account ip-access-lists replace --help
trace $CLI account ip-access-lists update --help
View File
@ -0,0 +1,114 @@
>>> $CLI account log-delivery create --help
Create a new log delivery configuration.
Creates a new Databricks log delivery configuration to enable delivery of the
specified type of logs to your storage location. This requires that you
already created a [credential object](:method:Credentials/Create) (which
encapsulates a cross-account service IAM role) and a [storage configuration
object](:method:Storage/Create) (which encapsulates an S3 bucket).
For full details, including the required IAM role policies and bucket
policies, see [Deliver and access billable usage logs] or [Configure audit
logging].
**Note**: There is a limit on the number of log delivery configurations
available per account (each limit applies separately to each log type
including billable usage and audit logs). You can create a maximum of two
enabled account-level delivery configurations (configurations without a
workspace filter) per type. Additionally, you can create two enabled
workspace-level delivery configurations per workspace for each log type, which
means that the same workspace ID can occur in the workspace filter for no more
than two delivery configurations per log type.
You cannot delete a log delivery configuration, but you can disable it (see
[Enable or disable log delivery
configuration](:method:LogDelivery/PatchStatus)).
[Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
[Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
Usage:
databricks account log-delivery create [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account log-delivery get --help
Get log delivery configuration.
Gets a Databricks log delivery configuration object for an account, both
specified by ID.
Arguments:
LOG_DELIVERY_CONFIGURATION_ID: Databricks log delivery configuration ID
Usage:
databricks account log-delivery get LOG_DELIVERY_CONFIGURATION_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account log-delivery list --help
Get all log delivery configurations.
Gets all Databricks log delivery configurations associated with an account
specified by ID.
Usage:
databricks account log-delivery list [flags]
Flags:
--credentials-id string Filter by credential configuration ID.
-h, --help help for list
--status LogDeliveryConfigStatus Filter by status ENABLED or DISABLED. Supported values: [DISABLED, ENABLED]
--storage-configuration-id string Filter by storage configuration ID.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account log-delivery patch-status --help
Enable or disable log delivery configuration.
Enables or disables a log delivery configuration. Deletion of delivery
configurations is not supported, so disable log delivery configurations that
are no longer needed. Note that you can't re-enable a delivery configuration
if this would violate the delivery configuration limits described under
[Create log delivery](:method:LogDelivery/Create).
Arguments:
LOG_DELIVERY_CONFIGURATION_ID: Databricks log delivery configuration ID
STATUS: Status of log delivery configuration. Set to ENABLED (enabled) or
DISABLED (disabled). Defaults to ENABLED. You can [enable or disable
the configuration](#operation/patch-log-delivery-config-status) later.
Deletion of a configuration is not supported, so disable a log delivery
configuration that is no longer needed.
Usage:
databricks account log-delivery patch-status LOG_DELIVERY_CONFIGURATION_ID STATUS [flags]
Flags:
-h, --help help for patch-status
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
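Since configurations cannot be deleted, patch-status is the way to retire one. A brief sketch with a placeholder configuration ID:

# Hypothetical example: list enabled configurations, then disable one.
$CLI account log-delivery list --status ENABLED
$CLI account log-delivery patch-status <log-delivery-configuration-id> DISABLED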
View File
@ -0,0 +1,5 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account log-delivery create --help
trace $CLI account log-delivery get --help
trace $CLI account log-delivery list --help
trace $CLI account log-delivery patch-status --help
View File
@ -0,0 +1,111 @@
>>> $CLI account metastore-assignments create --help
Assigns a workspace to a metastore.
Creates an assignment to a metastore for a workspace
Arguments:
WORKSPACE_ID: Workspace ID.
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account metastore-assignments create WORKSPACE_ID METASTORE_ID [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account metastore-assignments delete --help
Delete a metastore assignment.
Deletes a metastore assignment to a workspace, leaving the workspace with no
metastore.
Arguments:
WORKSPACE_ID: Workspace ID.
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account metastore-assignments delete WORKSPACE_ID METASTORE_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account metastore-assignments get --help
Gets the metastore assignment for a workspace.
Gets the metastore assignment, if any, for the workspace specified by ID. If
the workspace is assigned a metastore, the mapping will be returned. If no
metastore is assigned to the workspace, the assignment will not be found and a
404 returned.
Arguments:
WORKSPACE_ID: Workspace ID.
Usage:
databricks account metastore-assignments get WORKSPACE_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account metastore-assignments list --help
Get all workspaces assigned to a metastore.
Gets a list of all Databricks workspace IDs that have been assigned to a given
metastore.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account metastore-assignments list METASTORE_ID [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account metastore-assignments update --help
Updates a metastore assignment to a workspace.
Updates an assignment to a metastore for a workspace. Currently, only the
default catalog may be updated.
Arguments:
WORKSPACE_ID: Workspace ID.
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account metastore-assignments update WORKSPACE_ID METASTORE_ID [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
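A usage sketch of the assignment lifecycle above; both IDs are placeholders:

# Hypothetical example: assign a workspace to a metastore, then verify the assignment.
$CLI account metastore-assignments create <workspace-id> <metastore-id>
$CLI account metastore-assignments get <workspace-id>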
View File
@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account metastore-assignments create --help
trace $CLI account metastore-assignments delete --help
trace $CLI account metastore-assignments get --help
trace $CLI account metastore-assignments list --help
trace $CLI account metastore-assignments update --help
View File
@ -0,0 +1,97 @@
>>> $CLI account metastores create --help
Create metastore.
Creates a Unity Catalog metastore.
Usage:
databricks account metastores create [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account metastores delete --help
Delete a metastore.
Deletes a Unity Catalog metastore for an account, both specified by ID.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account metastores delete METASTORE_ID [flags]
Flags:
--force Force deletion even if the metastore is not empty.
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account metastores get --help
Get a metastore.
Gets a Unity Catalog metastore from an account, both specified by ID.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account metastores get METASTORE_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account metastores list --help
Get all metastores associated with an account.
Gets all Unity Catalog metastores associated with an account specified by ID.
Usage:
databricks account metastores list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account metastores update --help
Update a metastore.
Updates an existing Unity Catalog metastore.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account metastores update METASTORE_ID [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
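A brief sketch, assuming an existing metastore ID (placeholder below); note that delete offers --force for non-empty metastores:

# Hypothetical example: inspect a metastore, then force-delete it.
$CLI account metastores get <metastore-id>
$CLI account metastores delete <metastore-id> --force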
View File
@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account metastores create --help
trace $CLI account metastores delete --help
trace $CLI account metastores get --help
trace $CLI account metastores list --help
trace $CLI account metastores update --help
View File
@ -0,0 +1,184 @@
>>> $CLI account network-connectivity create-network-connectivity-configuration --help
Create a network connectivity configuration.
Arguments:
NAME: The name of the network connectivity configuration. The name can contain
alphanumeric characters, hyphens, and underscores. The length must be
between 3 and 30 characters. The name must match the regular expression
^[0-9a-zA-Z-_]{3,30}$.
REGION: The region for the network connectivity configuration. Only workspaces in
the same region can be attached to the network connectivity configuration.
Usage:
databricks account network-connectivity create-network-connectivity-configuration NAME REGION [flags]
Flags:
-h, --help help for create-network-connectivity-configuration
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account network-connectivity create-private-endpoint-rule --help
Create a private endpoint rule.
Create a private endpoint rule for the specified network connectivity config
object. Once the object is created, Databricks asynchronously provisions a new
Azure private endpoint to your specified Azure resource.
**IMPORTANT**: You must use Azure portal or other Azure tools to approve the
private endpoint to complete the connection. To get the information of the
private endpoint created, make a GET request on the new private endpoint
rule. See [serverless private link].
[serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link
Arguments:
NETWORK_CONNECTIVITY_CONFIG_ID: Your Network Connectivity Configuration ID.
RESOURCE_ID: The Azure resource ID of the target resource.
GROUP_ID: The sub-resource type (group ID) of the target resource. Note that to
connect to workspace root storage (root DBFS), you need two endpoints, one
for blob and one for dfs.
Usage:
databricks account network-connectivity create-private-endpoint-rule NETWORK_CONNECTIVITY_CONFIG_ID RESOURCE_ID GROUP_ID [flags]
Flags:
-h, --help help for create-private-endpoint-rule
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account network-connectivity delete-network-connectivity-configuration --help
Delete a network connectivity configuration.
Deletes a network connectivity configuration.
Arguments:
NETWORK_CONNECTIVITY_CONFIG_ID: Your Network Connectivity Configuration ID.
Usage:
databricks account network-connectivity delete-network-connectivity-configuration NETWORK_CONNECTIVITY_CONFIG_ID [flags]
Flags:
-h, --help help for delete-network-connectivity-configuration
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account network-connectivity delete-private-endpoint-rule --help
Delete a private endpoint rule.
Initiates deleting a private endpoint rule. If the connection state is PENDING
or EXPIRED, the private endpoint is immediately deleted. Otherwise, the
private endpoint is deactivated and will be deleted after seven days of
deactivation. When a private endpoint is deactivated, the deactivated field
is set to true and the private endpoint is not available to your serverless
compute resources.
Arguments:
NETWORK_CONNECTIVITY_CONFIG_ID: Your Network Connectivity Configuration ID.
PRIVATE_ENDPOINT_RULE_ID: Your private endpoint rule ID.
Usage:
databricks account network-connectivity delete-private-endpoint-rule NETWORK_CONNECTIVITY_CONFIG_ID PRIVATE_ENDPOINT_RULE_ID [flags]
Flags:
-h, --help help for delete-private-endpoint-rule
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account network-connectivity get-network-connectivity-configuration --help
Get a network connectivity configuration.
Gets a network connectivity configuration.
Arguments:
NETWORK_CONNECTIVITY_CONFIG_ID: Your Network Connectivity Configuration ID.
Usage:
databricks account network-connectivity get-network-connectivity-configuration NETWORK_CONNECTIVITY_CONFIG_ID [flags]
Flags:
-h, --help help for get-network-connectivity-configuration
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account network-connectivity get-private-endpoint-rule --help
Get a private endpoint rule.
Gets the private endpoint rule.
Arguments:
NETWORK_CONNECTIVITY_CONFIG_ID: Your Network Connectivity Configuration ID.
PRIVATE_ENDPOINT_RULE_ID: Your private endpoint rule ID.
Usage:
databricks account network-connectivity get-private-endpoint-rule NETWORK_CONNECTIVITY_CONFIG_ID PRIVATE_ENDPOINT_RULE_ID [flags]
Flags:
-h, --help help for get-private-endpoint-rule
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account network-connectivity list-network-connectivity-configurations --help
List network connectivity configurations.
Gets an array of network connectivity configurations.
Usage:
databricks account network-connectivity list-network-connectivity-configurations [flags]
Flags:
-h, --help help for list-network-connectivity-configurations
--page-token string Pagination token to go to next page based on previous query.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account network-connectivity list-private-endpoint-rules --help
List private endpoint rules.
Gets an array of private endpoint rules.
Arguments:
NETWORK_CONNECTIVITY_CONFIG_ID: Your Network Connectivity Configuration ID.
Usage:
databricks account network-connectivity list-private-endpoint-rules NETWORK_CONNECTIVITY_CONFIG_ID [flags]
Flags:
-h, --help help for list-private-endpoint-rules
--page-token string Pagination token to go to next page based on previous query.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
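A hedged sketch of the NCC flow above; the configuration name satisfies the documented ^[0-9a-zA-Z-_]{3,30}$ pattern, and the region and IDs are placeholders:

# Hypothetical example: create an NCC, then list its private endpoint rules.
$CLI account network-connectivity create-network-connectivity-configuration my-ncc-prod <region>
$CLI account network-connectivity list-private-endpoint-rules <network-connectivity-config-id>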
View File
@ -0,0 +1,9 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account network-connectivity create-network-connectivity-configuration --help
trace $CLI account network-connectivity create-private-endpoint-rule --help
trace $CLI account network-connectivity delete-network-connectivity-configuration --help
trace $CLI account network-connectivity delete-private-endpoint-rule --help
trace $CLI account network-connectivity get-network-connectivity-configuration --help
trace $CLI account network-connectivity get-private-endpoint-rule --help
trace $CLI account network-connectivity list-network-connectivity-configurations --help
trace $CLI account network-connectivity list-private-endpoint-rules --help
View File
@ -0,0 +1,91 @@
>>> $CLI account networks create --help
Create network configuration.
Creates a Databricks network configuration that represents a VPC and its
resources. The VPC will be used for new Databricks clusters. This requires a
pre-existing VPC and subnets.
Arguments:
NETWORK_NAME: The human-readable name of the network configuration.
Usage:
databricks account networks create NETWORK_NAME [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--vpc-id string The ID of the VPC associated with this network.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account networks delete --help
Delete a network configuration.
Deletes a Databricks network configuration, which represents a cloud VPC and
its resources. You cannot delete a network that is associated with a
workspace.
This operation is available only if your account is on the E2 version of the
platform.
Arguments:
NETWORK_ID: Databricks Account API network configuration ID.
Usage:
databricks account networks delete NETWORK_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account networks get --help
Get a network configuration.
Gets a Databricks network configuration, which represents a cloud VPC and its
resources.
Arguments:
NETWORK_ID: Databricks Account API network configuration ID.
Usage:
databricks account networks get NETWORK_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account networks list --help
Get all network configurations.
Gets a list of all Databricks network configurations for an account, specified
by ID.
This operation is available only if your account is on the E2 version of the
platform.
Usage:
databricks account networks list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
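A minimal sketch with placeholder IDs; a real request typically also supplies subnet and security group IDs through --json, which is not shown here:

# Hypothetical example: register a VPC as a network configuration, then read it back.
$CLI account networks create my-network --vpc-id <vpc-id>
$CLI account networks get <network-id>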
View File
@ -0,0 +1,5 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account networks create --help
trace $CLI account networks delete --help
trace $CLI account networks get --help
trace $CLI account networks list --help
View File
@ -0,0 +1,19 @@
>>> $CLI account o-auth-published-apps list --help
Get all the published OAuth apps.
Get all the available published OAuth apps in Databricks.
Usage:
databricks account o-auth-published-apps list [flags]
Flags:
-h, --help help for list
--page-size int The max number of OAuth published apps to return in one page.
--page-token string A token that can be used to get the next page of results.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
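A one-line sketch using the pagination flag together with the global output flag:

# Hypothetical example: fetch the first 50 published apps as JSON.
$CLI account o-auth-published-apps list --page-size 50 -o json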
View File
@ -0,0 +1,2 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account o-auth-published-apps list --help
View File
@ -0,0 +1,165 @@
>>> $CLI account personal-compute delete --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.
>>> $CLI account personal-compute get --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.
>>> $CLI account personal-compute update --help
Databricks Account Commands
Usage:
databricks account [command]
Identity and Access Management
access-control These APIs manage access rules on resources in an account.
groups Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.
service-principals Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
users User identities recognized by Databricks and represented by email addresses.
workspace-assignment The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
Unity Catalog
metastore-assignments These APIs manage metastore assignments to a workspace.
metastores These APIs manage Unity Catalog metastores for an account.
storage-credentials These APIs manage storage credentials for a particular metastore.
Settings
ip-access-lists The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
network-connectivity These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources.
settings Accounts Settings API allows users to manage settings at the account level.
Provisioning
credentials These APIs manage credential configurations for this workspace.
encryption-keys These APIs manage encryption key configurations for this workspace (optional).
networks These APIs manage network configurations for customer-managed VPCs (optional).
private-access These APIs manage private access settings for this account.
storage These APIs manage storage configurations for this workspace.
vpc-endpoints These APIs manage VPC endpoint configurations for this account.
workspaces These APIs manage workspaces for this account.
Billing
billable-usage This API allows you to download billable usage logs for the specified account and date range.
budgets These APIs manage budget configurations for this account.
log-delivery These APIs manage log delivery configurations for this account.
usage-dashboards These APIs manage usage dashboards for this account.
OAuth
custom-app-integration These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
o-auth-published-apps These APIs enable administrators to view all the available published OAuth applications in Databricks.
published-app-integration These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
service-principal-secrets These APIs enable administrators to manage service principal secrets.
Flags:
-h, --help help for account
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks account [command] --help" for more information about a command.
View File
@ -0,0 +1,4 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account personal-compute delete --help
trace $CLI account personal-compute get --help
trace $CLI account personal-compute update --help
View File
@ -0,0 +1,157 @@
>>> $CLI account private-access create --help
Create private access settings.
Creates a private access settings object, which specifies how your workspace
is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must
have a private access settings object referenced by ID in the workspace's
private_access_settings_id property.
You can share one private access settings object with multiple workspaces in a single
account. However, private access settings are specific to AWS regions, so only
workspaces in the same AWS region can use a given private access settings
object.
Before configuring PrivateLink, read the [Databricks article about
PrivateLink].
[AWS PrivateLink]: https://aws.amazon.com/privatelink
[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
Arguments:
PRIVATE_ACCESS_SETTINGS_NAME: The human-readable name of the private access settings object.
REGION: The cloud region for workspaces associated with this private access
settings object.
Usage:
databricks account private-access create PRIVATE_ACCESS_SETTINGS_NAME REGION [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--private-access-level PrivateAccessLevel The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object. Supported values: [ACCOUNT, ENDPOINT]
--public-access-enabled Determines if the workspace can be accessed over public internet.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account private-access delete --help
Delete a private access settings object.
Deletes a private access settings object, which determines how your workspace
is accessed over [AWS PrivateLink].
Before configuring PrivateLink, read the [Databricks article about
PrivateLink].
[AWS PrivateLink]: https://aws.amazon.com/privatelink
[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
Arguments:
PRIVATE_ACCESS_SETTINGS_ID: Databricks Account API private access settings ID.
Usage:
databricks account private-access delete PRIVATE_ACCESS_SETTINGS_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account private-access get --help
Get a private access settings object.
Gets a private access settings object, which specifies how your workspace is
accessed over [AWS PrivateLink].
Before configuring PrivateLink, read the [Databricks article about
PrivateLink].
[AWS PrivateLink]: https://aws.amazon.com/privatelink
[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
Arguments:
PRIVATE_ACCESS_SETTINGS_ID: Databricks Account API private access settings ID.
Usage:
databricks account private-access get PRIVATE_ACCESS_SETTINGS_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account private-access list --help
Get all private access settings objects.
Gets a list of all private access settings objects for an account, specified
by ID.
Usage:
databricks account private-access list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account private-access replace --help
Replace private access settings.
Updates an existing private access settings object, which specifies how your
workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a
workspace must have a private access settings object referenced by ID in the
workspace's private_access_settings_id property.
This operation completely overwrites your existing private access settings
object attached to your workspaces. All workspaces attached to the private
access settings are affected by any change. If public_access_enabled,
private_access_level, or allowed_vpc_endpoint_ids are updated, effects of
these changes might take several minutes to propagate to the workspace API.
You can share one private access settings object with multiple workspaces in a
single account. However, private access settings are specific to AWS regions,
so only workspaces in the same AWS region can use a given private access
settings object.
Before configuring PrivateLink, read the [Databricks article about
PrivateLink].
[AWS PrivateLink]: https://aws.amazon.com/privatelink
[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
Arguments:
PRIVATE_ACCESS_SETTINGS_ID: Databricks Account API private access settings ID.
PRIVATE_ACCESS_SETTINGS_NAME: The human-readable name of the private access settings object.
REGION: The cloud region for workspaces associated with this private access
settings object.
Usage:
databricks account private-access replace PRIVATE_ACCESS_SETTINGS_ID PRIVATE_ACCESS_SETTINGS_NAME REGION [flags]
Flags:
-h, --help help for replace
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--private-access-level PrivateAccessLevel The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object. Supported values: [ACCOUNT, ENDPOINT]
--public-access-enabled Determines if the workspace can be accessed over public internet.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
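A hedged sketch of create with the optional access-level flags; the settings name and region are placeholders:

# Hypothetical example: create a settings object restricted to account-registered VPC endpoints.
$CLI account private-access create my-private-access <region> --private-access-level ACCOUNT --public-access-enabled=false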
View File
@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account private-access create --help
trace $CLI account private-access delete --help
trace $CLI account private-access get --help
trace $CLI account private-access list --help
trace $CLI account private-access replace --help
View File
@ -0,0 +1,96 @@
>>> $CLI account published-app-integration create --help
Create Published OAuth App Integration.
Create Published OAuth App Integration.
You can retrieve the published OAuth app integration via
:method:PublishedAppIntegration/get.
Usage:
databricks account published-app-integration create [flags]
Flags:
--app-id string App id of the OAuth published app integration.
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account published-app-integration delete --help
Delete Published OAuth App Integration.
Delete an existing Published OAuth App Integration. You can retrieve the
published OAuth app integration via :method:PublishedAppIntegration/get.
Usage:
databricks account published-app-integration delete INTEGRATION_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account published-app-integration get --help
Get OAuth Published App Integration.
Gets the Published OAuth App Integration for the given integration id.
Usage:
databricks account published-app-integration get INTEGRATION_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account published-app-integration list --help
Get published OAuth app integrations.
Get the list of published OAuth app integrations for the specified Databricks
account
Usage:
databricks account published-app-integration list [flags]
Flags:
-h, --help help for list
--page-size int
--page-token string
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account published-app-integration update --help
Updates Published OAuth App Integration.
Updates an existing published OAuth App Integration. You can retrieve the
published OAuth app integration via :method:PublishedAppIntegration/get.
Usage:
databricks account published-app-integration update INTEGRATION_ID [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
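A brief sketch with a placeholder app ID (published app identifiers are not listed in this diff):

# Hypothetical example: enable a published app, then list integrations.
$CLI account published-app-integration create --app-id <published-app-id>
$CLI account published-app-integration list --page-size 20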
View File
@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account published-app-integration create --help
trace $CLI account published-app-integration delete --help
trace $CLI account published-app-integration get --help
trace $CLI account published-app-integration list --help
trace $CLI account published-app-integration update --help
View File
@ -0,0 +1,103 @@
>>> $CLI account service-principal-federation-policy create --help
Create service principal federation policy.
Arguments:
SERVICE_PRINCIPAL_ID: The service principal id for the federation policy.
Usage:
databricks account service-principal-federation-policy create SERVICE_PRINCIPAL_ID [flags]
Flags:
--description string Description of the federation policy.
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--name string Resource name for the federation policy.
--policy-id string The identifier for the federation policy.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principal-federation-policy delete --help
Delete service principal federation policy.
Arguments:
SERVICE_PRINCIPAL_ID: The service principal id for the federation policy.
POLICY_ID: The identifier for the federation policy.
Usage:
databricks account service-principal-federation-policy delete SERVICE_PRINCIPAL_ID POLICY_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principal-federation-policy get --help
Get service principal federation policy.
Arguments:
SERVICE_PRINCIPAL_ID: The service principal id for the federation policy.
POLICY_ID: The identifier for the federation policy.
Usage:
databricks account service-principal-federation-policy get SERVICE_PRINCIPAL_ID POLICY_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principal-federation-policy list --help
List service principal federation policies.
Arguments:
SERVICE_PRINCIPAL_ID: The service principal id for the federation policy.
Usage:
databricks account service-principal-federation-policy list SERVICE_PRINCIPAL_ID [flags]
Flags:
-h, --help help for list
--page-size int
--page-token string
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principal-federation-policy update --help
Update service principal federation policy.
Arguments:
SERVICE_PRINCIPAL_ID: The service principal id for the federation policy.
POLICY_ID: The identifier for the federation policy.
Usage:
databricks account service-principal-federation-policy update SERVICE_PRINCIPAL_ID POLICY_ID [flags]
Flags:
--description string Description of the federation policy.
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--name string Resource name for the federation policy.
--update-mask string The field mask specifies which fields of the policy to update.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
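A minimal sketch of the --update-mask flag described above; the IDs are placeholders:
# Update only the description field of an existing policy
databricks account service-principal-federation-policy update 12345 my-policy-id \
  --description "rotated trust policy" \
  --update-mask description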

@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account service-principal-federation-policy create --help
trace $CLI account service-principal-federation-policy delete --help
trace $CLI account service-principal-federation-policy get --help
trace $CLI account service-principal-federation-policy list --help
trace $CLI account service-principal-federation-policy update --help

@ -0,0 +1,64 @@
>>> $CLI account service-principal-secrets create --help
Create service principal secret.
Create a secret for the given service principal.
Arguments:
SERVICE_PRINCIPAL_ID: The service principal ID.
Usage:
databricks account service-principal-secrets create SERVICE_PRINCIPAL_ID [flags]
Flags:
-h, --help help for create
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principal-secrets delete --help
Delete service principal secret.
Delete a secret from the given service principal.
Arguments:
SERVICE_PRINCIPAL_ID: The service principal ID.
SECRET_ID: The secret ID.
Usage:
databricks account service-principal-secrets delete SERVICE_PRINCIPAL_ID SECRET_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principal-secrets list --help
List service principal secrets.
List all secrets associated with the given service principal. This operation
only returns information about the secrets themselves and does not include the
secret values.
Arguments:
SERVICE_PRINCIPAL_ID: The service principal ID.
Usage:
databricks account service-principal-secrets list SERVICE_PRINCIPAL_ID [flags]
Flags:
-h, --help help for list
--page-token string An opaque page token which was the next_page_token in the response of the previous request to list the secrets for this service principal.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
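A hedged pagination sketch based on the --page-token description above; it assumes the JSON response surfaces next_page_token:
# Fetch the first page, then request the next page with the returned token
databricks account service-principal-secrets list 12345 -o json
databricks account service-principal-secrets list 12345 --page-token "$NEXT_PAGE_TOKEN" -o json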

@ -0,0 +1,4 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account service-principal-secrets create --help
trace $CLI account service-principal-secrets delete --help
trace $CLI account service-principal-secrets list --help

@ -0,0 +1,138 @@
>>> $CLI account service-principals create --help
Create a service principal.
Creates a new service principal in the Databricks account.
Usage:
databricks account service-principals create [flags]
Flags:
--active If this user is active.
--application-id string UUID relating to the service principal.
--display-name string String that represents a concatenation of given and family names.
--external-id string
-h, --help help for create
--id string Databricks service principal ID.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principals delete --help
Delete a service principal.
Delete a single service principal in the Databricks account.
Arguments:
ID: Unique ID for a service principal in the Databricks account.
Usage:
databricks account service-principals delete ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principals get --help
Get service principal details.
Gets the details for a single service principal defined in the Databricks
account.
Arguments:
ID: Unique ID for a service principal in the Databricks account.
Usage:
databricks account service-principals get ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principals list --help
List service principals.
Gets the set of service principals associated with a Databricks account.
Usage:
databricks account service-principals list [flags]
Flags:
--attributes string Comma-separated list of attributes to return in response.
--count int Desired number of results per page.
--excluded-attributes string Comma-separated list of attributes to exclude in response.
--filter string Query by which the results have to be filtered.
-h, --help help for list
--sort-by string Attribute to sort the results.
--sort-order ListSortOrder The order to sort the results. Supported values: [ascending, descending]
--start-index int Specifies the index of the first result.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
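A sketch combining the list flags shown above; the filter value assumes SCIM-style syntax:
# Page through service principals whose display name matches, sorted ascending
databricks account service-principals list \
  --filter 'displayName eq "ci-bot"' \
  --sort-by displayName --sort-order ascending \
  --count 10 --start-index 1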
>>> $CLI account service-principals patch --help
Update service principal details.
Partially updates the details of a single service principal in the Databricks
account.
Arguments:
ID: Unique ID for a service principal in the Databricks account.
Usage:
databricks account service-principals patch ID [flags]
Flags:
-h, --help help for patch
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account service-principals update --help
Replace service principal.
Updates the details of a single service principal.
This action replaces the existing service principal with the same name.
Arguments:
ID: Databricks service principal ID.
Usage:
databricks account service-principals update ID [flags]
Flags:
--active If this user is active.
--application-id string UUID relating to the service principal.
--display-name string String that represents a concatenation of given and family names.
--external-id string
-h, --help help for update
--id string Databricks service principal ID.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
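To contrast patch (partial update) with update (full replacement), a hedged sketch; the SCIM PatchOp body is an assumption:
# Partially update one attribute via a SCIM PatchOp payload (hypothetical body)
databricks account service-principals patch 123 --json '{
  "schemas": ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
  "Operations": [{"op": "replace", "path": "active", "value": false}]
}'
# Replace the whole resource; unspecified fields are overwritten
databricks account service-principals update 123 --display-name "ci-bot" --active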

@ -0,0 +1,7 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account service-principals create --help
trace $CLI account service-principals delete --help
trace $CLI account service-principals get --help
trace $CLI account service-principals list --help
trace $CLI account service-principals patch --help
trace $CLI account service-principals update --help

@ -0,0 +1,3 @@
script: line 65: syntax error near unexpected token `)'
Exit code: 2

@ -0,0 +1 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

@ -0,0 +1,118 @@
>>> $CLI account storage-credentials create --help
Create a storage credential.
Creates a new storage credential. The request object is specific to the cloud:
* **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure
credentials * **GcpServiceAccountKey** for GCP credentials.
The caller must be a metastore admin and have the
**CREATE_STORAGE_CREDENTIAL** privilege on the metastore.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account storage-credentials create METASTORE_ID [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account storage-credentials delete --help
Delete a storage credential.
Deletes a storage credential from the metastore. The caller must be an owner
of the storage credential.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
STORAGE_CREDENTIAL_NAME: Name of the storage credential.
Usage:
databricks account storage-credentials delete METASTORE_ID STORAGE_CREDENTIAL_NAME [flags]
Flags:
--force Force deletion even if the Storage Credential is not empty.
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account storage-credentials get --help
Gets the named storage credential.
Gets a storage credential from the metastore. The caller must be a metastore
admin, the owner of the storage credential, or have a level of privilege on
the storage credential.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
STORAGE_CREDENTIAL_NAME: Name of the storage credential.
Usage:
databricks account storage-credentials get METASTORE_ID STORAGE_CREDENTIAL_NAME [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account storage-credentials list --help
Get all storage credentials assigned to a metastore.
Gets a list of all storage credentials that have been assigned to the given
metastore.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
Usage:
databricks account storage-credentials list METASTORE_ID [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account storage-credentials update --help
Updates a storage credential.
Updates a storage credential on the metastore. The caller must be the owner of
the storage credential. If the caller is a metastore admin, only the __owner__
credential can be changed.
Arguments:
METASTORE_ID: Unity Catalog metastore ID
STORAGE_CREDENTIAL_NAME: Name of the storage credential.
Usage:
databricks account storage-credentials update METASTORE_ID STORAGE_CREDENTIAL_NAME [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
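A hedged sketch of the cloud-specific request body mentioned above; the credential_info field names are assumptions:
# Create an AWS IAM role credential on a metastore (hypothetical body)
databricks account storage-credentials create <metastore-id> --json '{
  "credential_info": {
    "name": "my-credential",
    "aws_iam_role": {"role_arn": "arn:aws:iam::123456789012:role/my-role"}
  }
}'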

@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account storage-credentials create --help
trace $CLI account storage-credentials delete --help
trace $CLI account storage-credentials get --help
trace $CLI account storage-credentials list --help
trace $CLI account storage-credentials update --help

@ -0,0 +1,86 @@
>>> $CLI account storage create --help
Create new storage configuration.
Creates a new storage configuration for an account, specified by ID. Uploads a
storage configuration object that represents the root AWS S3 bucket in your
account. Databricks stores related workspace assets including DBFS, cluster
logs, and job results. For the AWS S3 bucket, you need to configure the
required bucket policy.
For information about how to create a new workspace with this API, see [Create
a new workspace using the Account API].
[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
Usage:
databricks account storage create [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account storage delete --help
Delete storage configuration.
Deletes a Databricks storage configuration. You cannot delete a storage
configuration that is associated with any workspace.
Arguments:
STORAGE_CONFIGURATION_ID: Databricks Account API storage configuration ID.
Usage:
databricks account storage delete STORAGE_CONFIGURATION_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account storage get --help
Get storage configuration.
Gets a Databricks storage configuration for an account, both specified by ID.
Arguments:
STORAGE_CONFIGURATION_ID: Databricks Account API storage configuration ID.
Usage:
databricks account storage get STORAGE_CONFIGURATION_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account storage list --help
Get all storage configurations.
Gets a list of all Databricks storage configurations for your account,
specified by ID.
Usage:
databricks account storage list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
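A minimal sketch of the root-bucket upload described above; the request body field names are assumptions:
# Register the root S3 bucket as a storage configuration (hypothetical body)
databricks account storage create --json '{
  "storage_configuration_name": "main-storage",
  "root_bucket_info": {"bucket_name": "my-root-bucket"}
}'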

@ -0,0 +1,5 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account storage create --help
trace $CLI account storage delete --help
trace $CLI account storage get --help
trace $CLI account storage list --help

@ -0,0 +1,40 @@
>>> $CLI account usage-dashboards create --help
Create new usage dashboard.
Create a usage dashboard specified by workspaceId, accountId, and dashboard
type.
Usage:
databricks account usage-dashboards create [flags]
Flags:
--dashboard-type UsageDashboardType Workspace level usage dashboard shows usage data for the specified workspace ID. Supported values: [USAGE_DASHBOARD_TYPE_GLOBAL, USAGE_DASHBOARD_TYPE_WORKSPACE]
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--workspace-id int The workspace ID of the workspace in which the usage dashboard is created.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account usage-dashboards get --help
Get usage dashboard.
Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
Usage:
databricks account usage-dashboards get [flags]
Flags:
--dashboard-type UsageDashboardType Workspace level usage dashboard shows usage data for the specified workspace ID. Supported values: [USAGE_DASHBOARD_TYPE_GLOBAL, USAGE_DASHBOARD_TYPE_WORKSPACE]
-h, --help help for get
--workspace-id int The workspace ID of the workspace in which the usage dashboard is created.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
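A short example using the flags documented above; the workspace ID is a placeholder:
# Create a workspace-level usage dashboard, then fetch it
databricks account usage-dashboards create --workspace-id 1234567890 --dashboard-type USAGE_DASHBOARD_TYPE_WORKSPACE
databricks account usage-dashboards get --workspace-id 1234567890 --dashboard-type USAGE_DASHBOARD_TYPE_WORKSPACE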

@ -0,0 +1,3 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account usage-dashboards create --help
trace $CLI account usage-dashboards get --help

@ -0,0 +1,145 @@
>>> $CLI account users create --help
Create a new user.
Creates a new user in the Databricks account.
Usage:
databricks account users create [flags]
Flags:
--active If this user is active.
--display-name string String that represents a concatenation of given and family names.
--external-id string External ID is not currently supported.
-h, --help help for create
--id string Databricks user ID.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--user-name string Email address of the Databricks user.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account users delete --help
Delete a user.
Deletes a user. Deleting a user from a Databricks account also removes objects
associated with the user.
Arguments:
ID: Unique ID for a user in the Databricks account.
Usage:
databricks account users delete ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account users get --help
Get user details.
Gets information for a specific user in the Databricks account.
Arguments:
ID: Unique ID for a user in the Databricks account.
Usage:
databricks account users get ID [flags]
Flags:
--attributes string Comma-separated list of attributes to return in response.
--count int Desired number of results per page.
--excluded-attributes string Comma-separated list of attributes to exclude in response.
--filter string Query by which the results have to be filtered.
-h, --help help for get
--sort-by string Attribute to sort the results.
--sort-order GetSortOrder The order to sort the results. Supported values: [ascending, descending]
--start-index int Specifies the index of the first result.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account users list --help
List users.
Gets details for all the users associated with a Databricks account.
Usage:
databricks account users list [flags]
Flags:
--attributes string Comma-separated list of attributes to return in response.
--count int Desired number of results per page.
--excluded-attributes string Comma-separated list of attributes to exclude in response.
--filter string Query by which the results have to be filtered.
-h, --help help for list
--sort-by string Attribute to sort the results.
--sort-order ListSortOrder The order to sort the results. Supported values: [ascending, descending]
--start-index int Specifies the index of the first result.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
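A sketch of the query flags above; the filter value assumes SCIM-style syntax:
# List account users filtered by email, two per page
databricks account users list --filter 'userName eq "someone@example.com"' --count 2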
>>> $CLI account users patch --help
Update user details.
Partially updates a user resource by applying the supplied operations on
specific user attributes.
Arguments:
ID: Unique ID for a user in the Databricks account.
Usage:
databricks account users patch ID [flags]
Flags:
-h, --help help for patch
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account users update --help
Replace a user.
Replaces a user's information with the data supplied in the request.
Arguments:
ID: Databricks user ID. This is automatically set by Databricks. Any value
provided by the client will be ignored.
Usage:
databricks account users update ID [flags]
Flags:
--active If this user is active.
--display-name string String that represents a concatenation of given and family names.
--external-id string External ID is not currently supported.
-h, --help help for update
--id string Databricks user ID.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--user-name string Email address of the Databricks user.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,7 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account users create --help
trace $CLI account users delete --help
trace $CLI account users get --help
trace $CLI account users list --help
trace $CLI account users patch --help
trace $CLI account users update --help

@ -0,0 +1,109 @@
>>> $CLI account vpc-endpoints create --help
Create VPC endpoint configuration.
Creates a VPC endpoint configuration, which represents a [VPC endpoint] object
in AWS used to communicate privately with Databricks over [AWS PrivateLink].
After you create the VPC endpoint configuration, the Databricks [endpoint
service] automatically accepts the VPC endpoint.
Before configuring PrivateLink, read the [Databricks article about
PrivateLink].
[AWS PrivateLink]: https://aws.amazon.com/privatelink
[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
[VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html
[endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html
Arguments:
VPC_ENDPOINT_NAME: The human-readable name of the VPC endpoint configuration.
Usage:
databricks account vpc-endpoints create VPC_ENDPOINT_NAME [flags]
Flags:
--aws-vpc-endpoint-id string The ID of the VPC endpoint object in AWS.
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--region string The AWS region in which this VPC endpoint object exists.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
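A hedged example using the create flags shown above; the endpoint ID is a placeholder:
# Register an existing AWS VPC endpoint with Databricks
databricks account vpc-endpoints create my-endpoint \
  --aws-vpc-endpoint-id vpce-0123456789abcdef0 \
  --region us-west-2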
>>> $CLI account vpc-endpoints delete --help
Delete VPC endpoint configuration.
Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint]
that can communicate privately with Databricks over [AWS PrivateLink].
Before configuring PrivateLink, read the [Databricks article about
PrivateLink].
[AWS PrivateLink]: https://aws.amazon.com/privatelink
[AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
Arguments:
VPC_ENDPOINT_ID: Databricks VPC endpoint ID.
Usage:
databricks account vpc-endpoints delete VPC_ENDPOINT_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account vpc-endpoints get --help
Get a VPC endpoint configuration.
Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in
AWS used to communicate privately with Databricks over [AWS PrivateLink].
[AWS PrivateLink]: https://aws.amazon.com/privatelink
[VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
Arguments:
VPC_ENDPOINT_ID: Databricks VPC endpoint ID.
Usage:
databricks account vpc-endpoints get VPC_ENDPOINT_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account vpc-endpoints list --help
Get all VPC endpoint configurations.
Gets a list of all VPC endpoints for an account, specified by ID.
Before configuring PrivateLink, read the [Databricks article about
PrivateLink].
[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
Usage:
databricks account vpc-endpoints list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,5 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account vpc-endpoints create --help
trace $CLI account vpc-endpoints delete --help
trace $CLI account vpc-endpoints get --help
trace $CLI account vpc-endpoints list --help

@ -0,0 +1,86 @@
>>> $CLI account workspace-assignment delete --help
Delete permissions assignment.
Deletes the workspace permissions assignment in a given account and workspace
for the specified principal.
Arguments:
WORKSPACE_ID: The workspace ID for the account.
PRINCIPAL_ID: The ID of the user, service principal, or group.
Usage:
databricks account workspace-assignment delete WORKSPACE_ID PRINCIPAL_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account workspace-assignment get --help
List workspace permissions.
Get an array of workspace permissions for the specified account and workspace.
Arguments:
WORKSPACE_ID: The workspace ID.
Usage:
databricks account workspace-assignment get WORKSPACE_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account workspace-assignment list --help
Get permission assignments.
Get the permission assignments for the specified Databricks account and
Databricks workspace.
Arguments:
WORKSPACE_ID: The workspace ID for the account.
Usage:
databricks account workspace-assignment list WORKSPACE_ID [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account workspace-assignment update --help
Create or update permissions assignment.
Creates or updates the workspace permissions assignment in a given account and
workspace for the specified principal.
Arguments:
WORKSPACE_ID: The workspace ID.
PRINCIPAL_ID: The ID of the user, service principal, or group.
Usage:
databricks account workspace-assignment update WORKSPACE_ID PRINCIPAL_ID [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
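A minimal sketch of the update call above; the permissions payload is an assumption:
# Grant a principal USER-level access to a workspace (hypothetical body)
databricks account workspace-assignment update 1234567890 987654 --json '{"permissions": ["USER"]}'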

@ -0,0 +1,5 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account workspace-assignment delete --help
trace $CLI account workspace-assignment get --help
trace $CLI account workspace-assignment list --help
trace $CLI account workspace-assignment update --help

@ -0,0 +1,279 @@
>>> $CLI account workspaces create --help
Create a new workspace.
Creates a new workspace.
**Important**: This operation is asynchronous. A response with HTTP status
code 200 means the request has been accepted and is in progress, but does not
mean that the workspace deployed successfully and is running. The initial
workspace status is typically PROVISIONING. Use the workspace ID
(workspace_id) field in the response to identify the new workspace and make
repeated GET requests with the workspace ID and check its status. The
workspace becomes available when the status changes to RUNNING.
Arguments:
WORKSPACE_NAME: The workspace's human-readable name.
Usage:
databricks account workspaces create WORKSPACE_NAME [flags]
Flags:
--aws-region string The AWS region of the workspace's data plane.
--cloud string The cloud provider which the workspace uses.
--credentials-id string ID of the workspace's credential configuration object.
--deployment-name string The deployment name defines part of the subdomain for the workspace.
-h, --help help for create
--is-no-public-ip-enabled Whether no public IP is enabled for the workspace.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--location string The Google Cloud region of the workspace data plane in your Google account.
--managed-services-customer-managed-key-id string The ID of the workspace's managed services encryption key configuration object.
--network-id string
--no-wait do not wait to reach RUNNING state
--pricing-tier PricingTier The pricing tier of the workspace. Supported values: [COMMUNITY_EDITION, DEDICATED, ENTERPRISE, PREMIUM, STANDARD, UNKNOWN]
--private-access-settings-id string ID of the workspace's private access settings object.
--storage-configuration-id string The ID of the workspace's storage configuration object.
--storage-customer-managed-key-id string The ID of the workspace's storage encryption key configuration object.
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account workspaces delete --help
Delete a workspace.
Terminates and deletes a Databricks workspace. From an API perspective,
deletion is immediate. However, it might take a few minutes for all workspace
resources to be deleted, depending on the size and number of workspace
resources.
This operation is available only if your account is on the E2 version of the
platform or on a select custom plan that allows multiple workspaces per
account.
Arguments:
WORKSPACE_ID: Workspace ID.
Usage:
databricks account workspaces delete WORKSPACE_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account workspaces get --help
Get a workspace.
Gets information including status for a Databricks workspace, specified by ID.
In the response, the workspace_status field indicates the current status.
After initial workspace creation (which is asynchronous), make repeated GET
requests with the workspace ID and check its status. The workspace becomes
available when the status changes to RUNNING.
For information about how to create a new workspace with this API **including
error handling**, see [Create a new workspace using the Account API].
This operation is available only if your account is on the E2 version of the
platform or on a select custom plan that allows multiple workspaces per
account.
[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
Arguments:
WORKSPACE_ID: Workspace ID.
Usage:
databricks account workspaces get WORKSPACE_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
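The polling loop described above, as a hedged shell sketch; it assumes -o json surfaces the workspace_status field mentioned in the help text and that jq is available:
# Poll until the new workspace reaches RUNNING
until [ "$(databricks account workspaces get 1234567890 -o json | jq -r .workspace_status)" = "RUNNING" ]; do
  sleep 30
done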
>>> $CLI account workspaces list --help
Get all workspaces.
Gets a list of all workspaces associated with an account, specified by ID.
This operation is available only if your account is on the E2 version of the
platform or on a select custom plan that allows multiple workspaces per
account.
Usage:
databricks account workspaces list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI account workspaces update --help
Update workspace configuration.
Updates a workspace configuration for either a running workspace or a failed
workspace. The elements that can be updated vary between these two use
cases.
### Update a failed workspace You can update a Databricks workspace
configuration for failed workspace deployment for some fields, but not all
fields. For a failed workspace, this request supports updates to the following
fields only: - Credential configuration ID - Storage configuration ID -
Network configuration ID. Used only to add or change a network configuration
for a customer-managed VPC. For a failed workspace only, you can convert a
workspace with Databricks-managed VPC to use a customer-managed VPC by adding
this ID. You cannot downgrade a workspace with a customer-managed VPC to be a
Databricks-managed VPC. You can update the network configuration for a failed
or running workspace to add PrivateLink support, though you must also add a
private access settings object. - Key configuration ID for managed services
(control plane storage, such as notebook source and Databricks SQL queries).
Used only if you use customer-managed keys for managed services. - Key
configuration ID for workspace storage (root S3 bucket and, optionally, EBS
volumes). Used only if you use customer-managed keys for workspace storage.
**Important**: If the workspace was ever in the running state, even if briefly
before becoming a failed workspace, you cannot add a new key configuration ID
for workspace storage. - Private access settings ID to add PrivateLink
support. You can add or update the private access settings ID to upgrade a
workspace to add support for front-end, back-end, or both types of
connectivity. You cannot remove (downgrade) any existing front-end or back-end
PrivateLink support on a workspace. - Custom tags. If you provide empty
custom tags, the update is not applied. - Network connectivity
configuration ID to add serverless stable IP support. You can add or update
the network connectivity configuration ID to ensure the workspace uses the
same set of stable IP CIDR blocks to access your resources. You cannot remove
a network connectivity configuration from the workspace once attached, you can
only switch to another one.
After calling the PATCH operation to update the workspace configuration,
make repeated GET requests with the workspace ID and check the workspace
status. The workspace is successful if the status changes to RUNNING.
For information about how to create a new workspace with this API **including
error handling**, see [Create a new workspace using the Account API].
### Update a running workspace You can update a Databricks workspace
configuration for running workspaces for some fields, but not all fields. For
a running workspace, this request supports updating the following fields only:
- Credential configuration ID - Network configuration ID. Used only if you
already use a customer-managed VPC. You cannot convert a running workspace
from a Databricks-managed VPC to a customer-managed VPC. You can use a network
configuration update in this API for a failed or running workspace to add
support for PrivateLink, although you also need to add a private access
settings object. - Key configuration ID for managed services (control plane
storage, such as notebook source and Databricks SQL queries). Databricks does
not directly encrypt the data with the customer-managed key (CMK). Databricks
uses both the CMK and the Databricks managed key (DMK) that is unique to your
workspace to encrypt the Data Encryption Key (DEK). Databricks uses the DEK to
encrypt your workspace's managed services persisted data. If the workspace
does not already have a CMK for managed services, adding this ID enables
managed services encryption for new or updated data. Existing managed services
data that existed before adding the key remains not encrypted with the DEK
until it is modified. If the workspace already has customer-managed keys for
managed services, this request rotates (changes) the CMK keys and the DEK is
re-encrypted with the DMK and the new CMK. - Key configuration ID for
workspace storage (root S3 bucket and, optionally, EBS volumes). You can set
this only if the workspace does not already have a customer-managed key
configuration for workspace storage. - Private access settings ID to add
PrivateLink support. You can add or update the private access settings ID to
upgrade a workspace to add support for front-end, back-end, or both types of
connectivity. You cannot remove (downgrade) any existing front-end or back-end
PrivateLink support on a workspace. - Custom tags. If you provide empty
custom tags, the update is not applied. - Network connectivity
configuration ID to add serverless stable IP support. You can add or update
the network connectivity configuration ID to ensure the workspace uses the
same set of stable IP CIDR blocks to access your resources. You cannot remove
a network connectivity configuration from the workspace once attached, you can
only switch to another one.
**Important**: To update a running workspace, your workspace must have no
running compute resources that run in your workspace's VPC in the Classic data
plane. For example, stop all all-purpose clusters, job clusters, pools with
running clusters, and Classic SQL warehouses. If you do not terminate all
cluster instances in the workspace before calling this API, the request will
fail.
### Wait until changes take effect. After calling the PATCH operation to
update the workspace configuration, make repeated GET requests with the
workspace ID and check the workspace status and the status of the fields. *
For workspaces with a Databricks-managed VPC, the workspace status becomes
PROVISIONING temporarily (typically under 20 minutes). If the workspace
update is successful, the workspace status changes to RUNNING. Note that you
can also check the workspace status in the [Account Console]. However, you
cannot use or create clusters for another 20 minutes after that status change.
This results in a total of up to 40 minutes in which you cannot create
clusters. If you create or use clusters before this interval elapses, they may
fail to launch or cause other unexpected behavior. * For workspaces with a
customer-managed VPC, the workspace status
stays at status RUNNING and the VPC change happens immediately. A change to
the storage customer-managed key configuration ID might take a few minutes to
update, so continue to check the workspace until you observe that it has been
updated. If the update fails, the workspace might revert silently to its
original configuration. After the workspace has been updated, you cannot use
or create clusters for another 20 minutes. If you create or use clusters
before this interval elapses, clusters may fail to launch or cause other
unexpected behavior.
If you update the _storage_ customer-managed key configurations, it takes 20
minutes for the changes to fully take effect. During the 20 minute wait, it is
important that you stop all REST API calls to the DBFS API. If you are
modifying _only the managed services key configuration_, you can omit the 20
minute wait.
**Important**: Customer-managed keys and customer-managed VPCs are supported
by only some deployment types and subscription types. If you have questions
about availability, contact your Databricks representative.
This operation is available only if your account is on the E2 version of the
platform or on a select custom plan that allows multiple workspaces per
account.
[Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html
[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
Arguments:
WORKSPACE_ID: Workspace ID.
Usage:
databricks account workspaces update WORKSPACE_ID [flags]
Flags:
--aws-region string The AWS region of the workspace's data plane (for example, us-west-2).
--credentials-id string ID of the workspace's credential configuration object.
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--managed-services-customer-managed-key-id string The ID of the workspace's managed services encryption key configuration object.
--network-connectivity-config-id string
--network-id string The ID of the workspace's network configuration object.
--no-wait do not wait to reach RUNNING state
--private-access-settings-id string The ID of the workspace's private access settings configuration object.
--storage-configuration-id string The ID of the workspace's storage configuration object.
--storage-customer-managed-key-id string The ID of the key configuration object for workspace storage.
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
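As a concrete (hedged) instance of the running-workspace update flow above, swapping the credential configuration and waiting for RUNNING; the IDs are placeholders:
# Update the credential configuration; the command waits up to the timeout by default
databricks account workspaces update 1234567890 \
  --credentials-id <new-credential-id> \
  --timeout 30m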

View File

@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI account workspaces create --help
trace $CLI account workspaces delete --help
trace $CLI account workspaces get --help
trace $CLI account workspaces list --help
trace $CLI account workspaces update --help

@ -0,0 +1,5 @@
>>> $CLI access-control-proxy get-assignable-roles-for-resource --help
Error: unknown command "access-control-proxy" for "databricks"
Exit code: 1

@ -0,0 +1,4 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI access-control-proxy get-assignable-roles-for-resource --help
trace $CLI access-control-proxy get-rule-set --help
trace $CLI access-control-proxy update-rule-set --help

@ -0,0 +1,16 @@
>>> $CLI access-control check-policy --help
Check access policy to a resource.
Usage:
databricks access-control check-policy [flags]
Flags:
-h, --help help for check-policy
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,2 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI access-control check-policy --help

@ -0,0 +1,5 @@
>>> $CLI aibi-dashboard-embedding-access-policy delete --help
Error: unknown command "aibi-dashboard-embedding-access-policy" for "databricks"
Exit code: 1

@ -0,0 +1,4 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI aibi-dashboard-embedding-access-policy delete --help
trace $CLI aibi-dashboard-embedding-access-policy get --help
trace $CLI aibi-dashboard-embedding-access-policy update --help

@ -0,0 +1,5 @@
>>> $CLI aibi-dashboard-embedding-approved-domains delete --help
Error: unknown command "aibi-dashboard-embedding-approved-domains" for "databricks"
Exit code: 1

@ -0,0 +1,4 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI aibi-dashboard-embedding-approved-domains delete --help
trace $CLI aibi-dashboard-embedding-approved-domains get --help
trace $CLI aibi-dashboard-embedding-approved-domains update --help

@ -0,0 +1,119 @@
>>> $CLI alerts-legacy create --help
Create an alert.
Creates an alert. An alert is a Databricks SQL object that periodically runs a
query, evaluates a condition of its result, and notifies users or notification
destinations if the condition was met.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/create instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
Usage:
databricks alerts-legacy create [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--parent string The identifier of the workspace folder containing the object.
--rearm int Number of seconds after being triggered before the alert rearms itself and can be triggered again.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI alerts-legacy delete --help
Delete an alert.
Deletes an alert. Deleted alerts are no longer accessible and cannot be
restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to
the trash.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/delete instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
Usage:
databricks alerts-legacy delete ALERT_ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI alerts-legacy get --help
Get an alert.
Gets an alert.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/get instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
Usage:
databricks alerts-legacy get ALERT_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI alerts-legacy list --help
Get alerts.
Gets a list of alerts.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/list instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
Usage:
databricks alerts-legacy list [flags]
Flags:
-h, --help help for list
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI alerts-legacy update --help
Update an alert.
Updates an alert.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/update instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
Usage:
databricks alerts-legacy update ALERT_ID [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--rearm int Number of seconds after being triggered before the alert rearms itself and can be triggered again.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI alerts-legacy create --help
trace $CLI alerts-legacy delete --help
trace $CLI alerts-legacy get --help
trace $CLI alerts-legacy list --help
trace $CLI alerts-legacy update --help

@ -0,0 +1,100 @@
>>> $CLI alerts create --help
Create an alert.
Creates an alert.
Usage:
databricks alerts create [flags]
Flags:
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI alerts delete --help
Delete an alert.
Moves an alert to the trash. Trashed alerts immediately disappear from
searches and list views, and can no longer trigger. You can restore a trashed
alert through the UI. A trashed alert is permanently deleted after 30 days.
Usage:
databricks alerts delete ID [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI alerts get --help
Get an alert.
Gets an alert.
Usage:
databricks alerts get ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI alerts list --help
List alerts.
Gets a list of alerts accessible to the user, ordered by creation time.
**Warning:** Calling this API concurrently 10 or more times could result in
throttling, service degradation, or a temporary ban.
Usage:
databricks alerts list [flags]
Flags:
-h, --help help for list
--page-size int
--page-token string
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI alerts update --help
Update an alert.
Updates an alert.
Arguments:
ID:
UPDATE_MASK: Field mask is required to be passed into the PATCH request. Field mask
specifies which fields of the setting payload will be updated. The field
mask needs to be supplied as a single string. To specify multiple fields in
the field mask, use a comma as the separator (no space).
Usage:
databricks alerts update ID UPDATE_MASK [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
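A short example of the positional field mask described above (comma-separated, no spaces); the field names are assumptions:
# Update only the display name and condition of an alert
databricks alerts update <alert-id> display_name,condition --json @alert.json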

@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI alerts create --help
trace $CLI alerts delete --help
trace $CLI alerts get --help
trace $CLI alerts list --help
trace $CLI alerts update --help

@ -0,0 +1,308 @@
>>> $CLI apps create --help
Create an app.
Creates a new app.
Arguments:
NAME: The name of the app. The name must contain only lowercase alphanumeric
characters and hyphens. It must be unique within the workspace.
Usage:
databricks apps create NAME [flags]
Flags:
--description string The description of the app.
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--no-compute If true, the app will not be started after creation.
--no-wait do not wait to reach ACTIVE state
--timeout duration maximum amount of time to reach ACTIVE state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps delete --help
Delete an app.
Deletes an app.
Arguments:
NAME: The name of the app.
Usage:
databricks apps delete NAME [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps deploy --help
Create an app deployment.
Creates an app deployment for the app with the supplied name.
Arguments:
APP_NAME: The name of the app.
Usage:
databricks apps deploy APP_NAME [flags]
Flags:
--deployment-id string The unique id of the deployment.
-h, --help help for deploy
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--mode AppDeploymentMode The mode of which the deployment will manage the source code. Supported values: [AUTO_SYNC, SNAPSHOT]
--no-wait do not wait to reach SUCCEEDED state
--source-code-path string The workspace file system path of the source code used to create the app deployment.
--timeout duration maximum amount of time to reach SUCCEEDED state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
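A hedged example of the deploy flags above; the workspace path is a placeholder:
# Deploy a snapshot of the app's source code from the workspace file system
databricks apps deploy my-app \
  --source-code-path /Workspace/Users/someone@example.com/my-app \
  --mode SNAPSHOT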
>>> $CLI apps get --help
Get an app.
Retrieves information for the app with the supplied name.
Arguments:
NAME: The name of the app.
Usage:
databricks apps get NAME [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps get-deployment --help
Get an app deployment.
Retrieves information for the app deployment with the supplied name and
deployment id.
Arguments:
APP_NAME: The name of the app.
DEPLOYMENT_ID: The unique id of the deployment.
Usage:
databricks apps get-deployment APP_NAME DEPLOYMENT_ID [flags]
Flags:
-h, --help help for get-deployment
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps get-permission-levels --help
Get app permission levels.
Gets the permission levels that a user can have on an object.
Arguments:
APP_NAME: The app for which to get or manage permissions.
Usage:
databricks apps get-permission-levels APP_NAME [flags]
Flags:
-h, --help help for get-permission-levels
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps get-permissions --help
Get app permissions.
Gets the permissions of an app. Apps can inherit permissions from their root
object.
Arguments:
APP_NAME: The app for which to get or manage permissions.
Usage:
databricks apps get-permissions APP_NAME [flags]
Flags:
-h, --help help for get-permissions
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps list --help
List apps.
Lists all apps in the workspace.
Usage:
databricks apps list [flags]
Flags:
-h, --help help for list
--page-size int Upper bound for items returned.
--page-token string Pagination token to go to the next page of apps.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps list-deployments --help
List app deployments.
Lists all app deployments for the app with the supplied name.
Arguments:
APP_NAME: The name of the app.
Usage:
databricks apps list-deployments APP_NAME [flags]
Flags:
-h, --help help for list-deployments
--page-size int Upper bound for items returned.
--page-token string Pagination token to go to the next page of apps.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps set-permissions --help
Set app permissions.
Sets permissions on an object, replacing existing permissions if they exist.
Deletes all direct permissions if none are specified. Objects can inherit
permissions from their root object.
Arguments:
APP_NAME: The app for which to get or manage permissions.
Usage:
databricks apps set-permissions APP_NAME [flags]
Flags:
-h, --help help for set-permissions
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
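A hedged example of replacing an app's permissions with an inline JSON body (the user name and permission level are assumptions about the ACL schema):
$CLI apps set-permissions my-dashboard --json '{"access_control_list": [{"user_name": "someone@example.com", "permission_level": "CAN_USE"}]}'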
>>> $CLI apps start --help
Start an app.
Start the last active deployment of the app in the workspace.
Arguments:
NAME: The name of the app.
Usage:
databricks apps start NAME [flags]
Flags:
-h, --help help for start
--no-wait do not wait to reach ACTIVE state
--timeout duration maximum amount of time to reach ACTIVE state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps stop --help
Stop an app.
Stops the active deployment of the app in the workspace.
Arguments:
NAME: The name of the app.
Usage:
databricks apps stop NAME [flags]
Flags:
-h, --help help for stop
--no-wait do not wait to reach STOPPED state
--timeout duration maximum amount of time to reach STOPPED state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps update --help
Update an app.
Updates the app with the supplied name.
Arguments:
NAME: The name of the app. The name must contain only lowercase alphanumeric
characters and hyphens. It must be unique within the workspace.
Usage:
databricks apps update NAME [flags]
Flags:
--description string The description of the app.
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI apps update-permissions --help
Update app permissions.
Updates the permissions on an app. Apps can inherit permissions from their
root object.
Arguments:
APP_NAME: The app for which to get or manage permissions.
Usage:
databricks apps update-permissions APP_NAME [flags]
Flags:
-h, --help help for update-permissions
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

acceptance/help/cmd/workspace/apps/apps/script generated Executable file
@ -0,0 +1,15 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI apps create --help
trace $CLI apps delete --help
trace $CLI apps deploy --help
trace $CLI apps get --help
trace $CLI apps get-deployment --help
trace $CLI apps get-permission-levels --help
trace $CLI apps get-permissions --help
trace $CLI apps list --help
trace $CLI apps list-deployments --help
trace $CLI apps set-permissions --help
trace $CLI apps start --help
trace $CLI apps stop --help
trace $CLI apps update --help
trace $CLI apps update-permissions --help

@ -0,0 +1,44 @@
>>> $CLI artifact-allowlists get --help
Get an artifact allowlist.
Get the artifact allowlist of a certain artifact type. The caller must be a
metastore admin or have the **MANAGE ALLOWLIST** privilege on the metastore.
Arguments:
ARTIFACT_TYPE: The artifact type of the allowlist.
Usage:
databricks artifact-allowlists get ARTIFACT_TYPE [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI artifact-allowlists update --help
Set an artifact allowlist.
Set the artifact allowlist of a certain artifact type. The whole artifact
allowlist is replaced with the new allowlist. The caller must be a metastore
admin or have the **MANAGE ALLOWLIST** privilege on the metastore.
Arguments:
ARTIFACT_TYPE: The artifact type of the allowlist.
Usage:
databricks artifact-allowlists update ARTIFACT_TYPE [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
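An illustrative sketch (the artifact type and the matcher body are assumptions based on the allowlist schema):
$CLI artifact-allowlists update INIT_SCRIPT --json '{"artifact_matchers": [{"artifact": "s3://my-bucket/init-scripts/", "match_type": "PREFIX_MATCH"}]}'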

@ -0,0 +1,3 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI artifact-allowlists get --help
trace $CLI artifact-allowlists update --help

@ -0,0 +1,5 @@
>>> $CLI automatic-cluster-update get --help
Error: unknown command "automatic-cluster-update" for "databricks"
Exit code: 1

@ -0,0 +1,3 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI automatic-cluster-update get --help
trace $CLI automatic-cluster-update update --help

@ -0,0 +1,124 @@
>>> $CLI catalogs create --help
Create a catalog.
Creates a new catalog instance in the parent metastore if the caller is a
metastore admin or has the **CREATE_CATALOG** privilege.
Arguments:
NAME: Name of catalog.
Usage:
databricks catalogs create NAME [flags]
Flags:
--comment string User-provided free-form text description.
--connection-name string The name of the connection to an external data source.
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--provider-name string The name of delta sharing provider.
--share-name string The name of the share under the share provider.
--storage-root string Storage root URL for managed tables within catalog.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
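For example, a catalog with a comment and a managed storage root might be created like this (the name and storage URL are illustrative):
$CLI catalogs create main_dev --comment "Dev catalog" --storage-root s3://my-bucket/catalogs/main_dev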
>>> $CLI catalogs delete --help
Delete a catalog.
Deletes the catalog that matches the supplied name. The caller must be a
metastore admin or the owner of the catalog.
Arguments:
NAME: The name of the catalog.
Usage:
databricks catalogs delete NAME [flags]
Flags:
--force Force deletion even if the catalog is not empty.
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI catalogs get --help
Get a catalog.
Gets the specified catalog in a metastore. The caller must be a metastore
admin, the owner of the catalog, or a user that has the **USE_CATALOG**
privilege set for their account.
Arguments:
NAME: The name of the catalog.
Usage:
databricks catalogs get NAME [flags]
Flags:
-h, --help help for get
--include-browse Whether to include catalogs in the response for which the principal can only access selective metadata.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI catalogs list --help
List catalogs.
Gets an array of catalogs in the metastore. If the caller is the metastore
admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the
caller (or for which the caller has the **USE_CATALOG** privilege) will be
retrieved. There is no guarantee of a specific ordering of the elements in the
array.
Usage:
databricks catalogs list [flags]
Flags:
-h, --help help for list
--include-browse Whether to include catalogs in the response for which the principal can only access selective metadata.
--max-results int Maximum number of catalogs to return.
--page-token string Opaque pagination token to go to next page based on previous query.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI catalogs update --help
Update a catalog.
Updates the catalog that matches the supplied name. The caller must be either
the owner of the catalog, or a metastore admin (when changing the owner field
of the catalog).
Arguments:
NAME: The name of the catalog.
Usage:
databricks catalogs update NAME [flags]
Flags:
--comment string User-provided free-form text description.
--enable-predictive-optimization EnablePredictiveOptimization Whether predictive optimization should be enabled for this object and objects under it. Supported values: [DISABLE, ENABLE, INHERIT]
-h, --help help for update
--isolation-mode CatalogIsolationMode Whether the current securable is accessible from all workspaces or a specific set of workspaces. Supported values: [ISOLATED, OPEN]
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--new-name string New name for the catalog.
--owner string Username of current owner of catalog.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
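A rename plus ownership change might look like this (both values are illustrative):
$CLI catalogs update main_dev --new-name main_staging --owner platform-admins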

View File

@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI catalogs create --help
trace $CLI catalogs delete --help
trace $CLI catalogs get --help
trace $CLI catalogs list --help
trace $CLI catalogs update --help

@ -0,0 +1,124 @@
>>> $CLI clean-room-assets create --help
Create an asset.
Create a clean room asset: share an asset such as a notebook or table into the
clean room. For each UC asset that is added through this method, the clean
room owner must also have enough privilege on the asset to consume it. The
privilege must be maintained indefinitely for the clean room to be able to
access the asset. Typically, you should use a group as the clean room owner.
Arguments:
CLEAN_ROOM_NAME: Name of the clean room.
Usage:
databricks clean-room-assets create CLEAN_ROOM_NAME [flags]
Flags:
--asset-type CleanRoomAssetAssetType The type of the asset. Supported values: [FOREIGN_TABLE, NOTEBOOK_FILE, TABLE, VIEW, VOLUME]
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--name string A fully qualified name that uniquely identifies the asset within the clean room.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
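A sketch of sharing a table into a clean room; the clean room name is illustrative, and the JSON file is assumed to carry the remaining asset fields:
$CLI clean-room-assets create my-clean-room --asset-type TABLE --name shared_catalog.shared_schema.customers --json @asset.json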
>>> $CLI clean-room-assets delete --help
Delete an asset.
Delete a clean room asset: unshare/remove the asset from the clean room.
Arguments:
CLEAN_ROOM_NAME: Name of the clean room.
ASSET_TYPE: The type of the asset.
ASSET_FULL_NAME: The fully qualified name of the asset; it is the same as the name field in
CleanRoomAsset.
Usage:
databricks clean-room-assets delete CLEAN_ROOM_NAME ASSET_TYPE ASSET_FULL_NAME [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clean-room-assets get --help
Get an asset.
Get the details of a clean room asset by its type and full name.
Arguments:
CLEAN_ROOM_NAME: Name of the clean room.
ASSET_TYPE: The type of the asset.
ASSET_FULL_NAME: The fully qualified name of the asset; it is the same as the name field in
CleanRoomAsset.
Usage:
databricks clean-room-assets get CLEAN_ROOM_NAME ASSET_TYPE ASSET_FULL_NAME [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clean-room-assets list --help
List assets.
Arguments:
CLEAN_ROOM_NAME: Name of the clean room.
Usage:
databricks clean-room-assets list CLEAN_ROOM_NAME [flags]
Flags:
-h, --help help for list
--page-token string Opaque pagination token to go to next page based on previous query.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clean-room-assets update --help
Update an asset.
Update a clean room asset. For example, updating the content of a notebook;
changing the shared partitions of a table; etc.
Arguments:
CLEAN_ROOM_NAME: Name of the clean room.
ASSET_TYPE: The type of the asset.
NAME: A fully qualified name that uniquely identifies the asset within the clean
room. This is also the name displayed in the clean room UI.
For UC securable assets (tables, volumes, etc.), the format is
*shared_catalog*.*shared_schema*.*asset_name*.
For notebooks, the name is the notebook file name.
Usage:
databricks clean-room-assets update CLEAN_ROOM_NAME ASSET_TYPE NAME [flags]
Flags:
--asset-type CleanRoomAssetAssetType The type of the asset. Supported values: [FOREIGN_TABLE, NOTEBOOK_FILE, TABLE, VIEW, VOLUME]
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--name string A fully qualified name that uniquely identifies the asset within the clean room.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,6 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI clean-room-assets create --help
trace $CLI clean-room-assets delete --help
trace $CLI clean-room-assets get --help
trace $CLI clean-room-assets list --help
trace $CLI clean-room-assets update --help

@ -0,0 +1,23 @@
>>> $CLI clean-room-task-runs list --help
List notebook task runs.
List all the historical notebook task runs in a clean room.
Arguments:
CLEAN_ROOM_NAME: Name of the clean room.
Usage:
databricks clean-room-task-runs list CLEAN_ROOM_NAME [flags]
Flags:
-h, --help help for list
--notebook-name string Notebook name.
--page-size int The maximum number of task runs to return.
--page-token string Opaque pagination token to go to next page based on previous query.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,2 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI clean-room-task-runs list --help

@ -0,0 +1,134 @@
>>> $CLI clean-rooms create --help
Create a clean room.
Create a new clean room with the specified collaborators. This method is
asynchronous; the returned name field inside the clean_room field can be used
to poll the clean room status, using the :method:cleanrooms/get method. When
this method returns, the clean room will be in a PROVISIONING state. The clean
room will be usable once it enters an ACTIVE state.
The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM**
privilege on the metastore.
Usage:
databricks clean-rooms create [flags]
Flags:
--comment string
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--name string The name of the clean room.
--owner string This is the Databricks username of the owner of the local clean room securable for permission management.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
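Since most of the clean room specification (collaborators, remote details) is nested, a JSON body is the natural path; the file name is illustrative:
$CLI clean-rooms create --name my-clean-room --json @clean-room.json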
>>> $CLI clean-rooms create-output-catalog --help
Create an output catalog.
Create the output catalog of the clean room.
Arguments:
CLEAN_ROOM_NAME: Name of the clean room.
Usage:
databricks clean-rooms create-output-catalog CLEAN_ROOM_NAME [flags]
Flags:
--catalog-name string The name of the output catalog in UC.
-h, --help help for create-output-catalog
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clean-rooms delete --help
Delete a clean room.
Delete a clean room. After deletion, the clean room will be removed from the
metastore. If the other collaborators have not deleted the clean room, they
will still have the clean room in their metastore, but it will be in a DELETED
state and no operations other than deletion can be performed on it.
Arguments:
NAME: Name of the clean room.
Usage:
databricks clean-rooms delete NAME [flags]
Flags:
-h, --help help for delete
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clean-rooms get --help
Get a clean room.
Get the details of a clean room given its name.
Usage:
databricks clean-rooms get NAME [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clean-rooms list --help
List clean rooms.
Get a list of all clean rooms of the metastore. Only clean rooms the caller
has access to are returned.
Usage:
databricks clean-rooms list [flags]
Flags:
-h, --help help for list
--page-size int Maximum number of clean rooms to return (i.e., the page length).
--page-token string Opaque pagination token to go to next page based on previous query.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clean-rooms update --help
Update a clean room.
Update a clean room. The caller must be the owner of the clean room, have
**MODIFY_CLEAN_ROOM** privilege, or be a metastore admin.
When the caller is a metastore admin, only the __owner__ field can be updated.
Arguments:
NAME: Name of the clean room.
Usage:
databricks clean-rooms update NAME [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,7 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI clean-rooms create --help
trace $CLI clean-rooms create-output-catalog --help
trace $CLI clean-rooms delete --help
trace $CLI clean-rooms get --help
trace $CLI clean-rooms list --help
trace $CLI clean-rooms update --help

@ -0,0 +1,199 @@
>>> $CLI cluster-policies create --help
Create a new policy.
Creates a new policy with prescribed settings.
Usage:
databricks cluster-policies create [flags]
Flags:
--definition string Policy definition document expressed in [Databricks Cluster Policy Definition Language](https://docs.databricks.com/administration-guide/clusters/policy-definition.html).
--description string Additional human-readable description of the cluster policy.
-h, --help help for create
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--max-clusters-per-user int Max number of clusters per user that can be active using this policy.
--name string Cluster Policy name requested by the user.
--policy-family-definition-overrides string Policy definition JSON document expressed in [Databricks Policy Definition Language](https://docs.databricks.com/administration-guide/clusters/policy-definition.html).
--policy-family-id string ID of the policy family.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
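As a sketch, a policy that pins the Spark version uses the policy definition language's fixed form (the name and value are illustrative):
$CLI cluster-policies create --name "Pinned runtime" --max-clusters-per-user 5 --definition '{"spark_version": {"type": "fixed", "value": "13.3.x-scala2.12"}}'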
>>> $CLI cluster-policies delete --help
Delete a cluster policy.
Delete a policy for a cluster. Clusters governed by this policy can still run,
but cannot be edited.
Arguments:
POLICY_ID: The ID of the policy to delete.
Usage:
databricks cluster-policies delete POLICY_ID [flags]
Flags:
-h, --help help for delete
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI cluster-policies edit --help
Update a cluster policy.
Update an existing policy for a cluster. This operation may make some clusters
governed by the previous policy invalid.
Arguments:
POLICY_ID: The ID of the policy to update.
Usage:
databricks cluster-policies edit POLICY_ID [flags]
Flags:
--definition string Policy definition document expressed in [Databricks Cluster Policy Definition Language](https://docs.databricks.com/administration-guide/clusters/policy-definition.html).
--description string Additional human-readable description of the cluster policy.
-h, --help help for edit
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--max-clusters-per-user int Max number of clusters per user that can be active using this policy.
--name string Cluster Policy name requested by the user.
--policy-family-definition-overrides string Policy definition JSON document expressed in [Databricks Policy Definition Language](https://docs.databricks.com/administration-guide/clusters/policy-definition.html).
--policy-family-id string ID of the policy family.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI cluster-policies get --help
Get a cluster policy.
Get a cluster policy entity. Creation and editing is available to admins only.
Arguments:
POLICY_ID: Canonical unique identifier for the Cluster Policy.
Usage:
databricks cluster-policies get POLICY_ID [flags]
Flags:
-h, --help help for get
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI cluster-policies get-permission-levels --help
Get cluster policy permission levels.
Gets the permission levels that a user can have on an object.
Arguments:
CLUSTER_POLICY_ID: The cluster policy for which to get or manage permissions.
Usage:
databricks cluster-policies get-permission-levels CLUSTER_POLICY_ID [flags]
Flags:
-h, --help help for get-permission-levels
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI cluster-policies get-permissions --help
Get cluster policy permissions.
Gets the permissions of a cluster policy. Cluster policies can inherit
permissions from their root object.
Arguments:
CLUSTER_POLICY_ID: The cluster policy for which to get or manage permissions.
Usage:
databricks cluster-policies get-permissions CLUSTER_POLICY_ID [flags]
Flags:
-h, --help help for get-permissions
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI cluster-policies list --help
List cluster policies.
Returns a list of policies accessible by the requesting user.
Usage:
databricks cluster-policies list [flags]
Flags:
-h, --help help for list
--sort-column ListSortColumn The cluster policy attribute to sort by. Supported values: [POLICY_CREATION_TIME, POLICY_NAME]
--sort-order ListSortOrder The order in which the policies get listed. Supported values: [ASC, DESC]
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI cluster-policies set-permissions --help
Set cluster policy permissions.
Sets permissions on an object, replacing existing permissions if they exist.
Deletes all direct permissions if none are specified. Objects can inherit
permissions from their root object.
Arguments:
CLUSTER_POLICY_ID: The cluster policy for which to get or manage permissions.
Usage:
databricks cluster-policies set-permissions CLUSTER_POLICY_ID [flags]
Flags:
-h, --help help for set-permissions
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI cluster-policies update-permissions --help
Update cluster policy permissions.
Updates the permissions on a cluster policy. Cluster policies can inherit
permissions from their root object.
Arguments:
CLUSTER_POLICY_ID: The cluster policy for which to get or manage permissions.
Usage:
databricks cluster-policies update-permissions CLUSTER_POLICY_ID [flags]
Flags:
-h, --help help for update-permissions
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,10 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI cluster-policies create --help
trace $CLI cluster-policies delete --help
trace $CLI cluster-policies edit --help
trace $CLI cluster-policies get --help
trace $CLI cluster-policies get-permission-levels --help
trace $CLI cluster-policies get-permissions --help
trace $CLI cluster-policies list --help
trace $CLI cluster-policies set-permissions --help
trace $CLI cluster-policies update-permissions --help

@ -0,0 +1,586 @@
>>> $CLI clusters change-owner --help
Change cluster owner.
Change the owner of the cluster. You must be an admin and the cluster must be
terminated to perform this operation. The service principal application ID can
be supplied as an argument to owner_username.
Arguments:
CLUSTER_ID: <needs content added>
OWNER_USERNAME: New owner of the cluster_id after this RPC.
Usage:
databricks clusters change-owner CLUSTER_ID OWNER_USERNAME [flags]
Flags:
-h, --help help for change-owner
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters create --help
Create new cluster.
Creates a new Spark cluster. This method will acquire new instances from the
cloud provider if necessary. Note: Databricks may not be able to acquire some
of the requested nodes, due to cloud provider limitations (account limits,
spot price, etc.) or transient network issues.
If Databricks acquires at least 85% of the requested on-demand nodes, cluster
creation will succeed. Otherwise the cluster will terminate with an
informative error message.
Rather than authoring the cluster's JSON definition from scratch, Databricks
recommends filling out the [create compute UI] and then copying the generated
JSON definition from the UI.
[create compute UI]: https://docs.databricks.com/compute/configure.html
Arguments:
SPARK_VERSION: The Spark version of the cluster, e.g. 3.3.x-scala2.11. A list of
available Spark versions can be retrieved by using the
:method:clusters/sparkVersions API call.
Usage:
databricks clusters create SPARK_VERSION [flags]
Flags:
--apply-policy-default-values When set to true, fixed and default values from the policy will be used for fields that are omitted.
--autotermination-minutes int Automatically terminates the cluster after it is inactive for this time in minutes.
--cluster-name string Cluster name requested by the user.
--data-security-mode DataSecurityMode Data security mode decides what data governance model to use when accessing data from a cluster. Supported values: [
DATA_SECURITY_MODE_AUTO,
DATA_SECURITY_MODE_DEDICATED,
DATA_SECURITY_MODE_STANDARD,
LEGACY_PASSTHROUGH,
LEGACY_SINGLE_USER,
LEGACY_SINGLE_USER_STANDARD,
LEGACY_TABLE_ACL,
NONE,
SINGLE_USER,
USER_ISOLATION,
]
--driver-instance-pool-id string The optional ID of the instance pool to which the driver of the cluster belongs.
--driver-node-type-id string The node type of the Spark driver.
--enable-elastic-disk Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.
--enable-local-disk-encryption Whether to enable LUKS on cluster VMs' local disks.
-h, --help help for create
--instance-pool-id string The optional ID of the instance pool to which the cluster belongs.
--is-single-node This field can only be used with kind.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--kind Kind The kind of compute described by this compute specification. Supported values: [CLASSIC_PREVIEW]
--no-wait do not wait to reach RUNNING state
--node-type-id string This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.
--num-workers int Number of worker nodes that this cluster should have.
--policy-id string The ID of the cluster policy used to create the cluster if applicable.
--runtime-engine RuntimeEngine Determines the cluster's runtime engine, either standard or Photon. Supported values: [NULL, PHOTON, STANDARD]
--single-user-name string Single user name if data_security_mode is SINGLE_USER.
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
--use-ml-runtime This field can only be used with kind.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
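A minimal create call, assuming an AWS workspace (the node type, runtime version, and cluster name are illustrative):
$CLI clusters create 13.3.x-scala2.12 --cluster-name dev-cluster --node-type-id i3.xlarge --num-workers 2 --autotermination-minutes 60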
>>> $CLI clusters delete --help
Terminate cluster.
Terminates the Spark cluster with the specified ID. The cluster is removed
asynchronously. Once the termination has completed, the cluster will be in a
TERMINATED state. If the cluster is already in a TERMINATING or
TERMINATED state, nothing will happen.
Arguments:
CLUSTER_ID: The cluster to be terminated.
Usage:
databricks clusters delete CLUSTER_ID [flags]
Flags:
-h, --help help for delete
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--no-wait do not wait to reach TERMINATED state
--timeout duration maximum amount of time to reach TERMINATED state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters edit --help
Update cluster configuration.
Updates the configuration of a cluster to match the provided attributes and
size. A cluster can be updated if it is in a RUNNING or TERMINATED state.
If a cluster is updated while in a RUNNING state, it will be restarted so
that the new attributes can take effect.
If a cluster is updated while in a TERMINATED state, it will remain
TERMINATED. The next time it is started using the clusters/start API, the
new attributes will take effect. Any attempt to update a cluster in any other
state will be rejected with an INVALID_STATE error code.
Clusters created by the Databricks Jobs service cannot be edited.
Arguments:
CLUSTER_ID: ID of the cluster
SPARK_VERSION: The Spark version of the cluster, e.g. 3.3.x-scala2.11. A list of
available Spark versions can be retrieved by using the
:method:clusters/sparkVersions API call.
Usage:
databricks clusters edit CLUSTER_ID SPARK_VERSION [flags]
Flags:
--apply-policy-default-values When set to true, fixed and default values from the policy will be used for fields that are omitted.
--autotermination-minutes int Automatically terminates the cluster after it is inactive for this time in minutes.
--cluster-name string Cluster name requested by the user.
--data-security-mode DataSecurityMode Data security mode decides what data governance model to use when accessing data from a cluster. Supported values: [
DATA_SECURITY_MODE_AUTO,
DATA_SECURITY_MODE_DEDICATED,
DATA_SECURITY_MODE_STANDARD,
LEGACY_PASSTHROUGH,
LEGACY_SINGLE_USER,
LEGACY_SINGLE_USER_STANDARD,
LEGACY_TABLE_ACL,
NONE,
SINGLE_USER,
USER_ISOLATION,
]
--driver-instance-pool-id string The optional ID of the instance pool to which the driver of the cluster belongs.
--driver-node-type-id string The node type of the Spark driver.
--enable-elastic-disk Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.
--enable-local-disk-encryption Whether to enable LUKS on cluster VMs' local disks.
-h, --help help for edit
--instance-pool-id string The optional ID of the instance pool to which the cluster belongs.
--is-single-node This field can only be used with kind.
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--kind Kind The kind of compute described by this compute specification. Supported values: [CLASSIC_PREVIEW]
--no-wait do not wait to reach RUNNING state
--node-type-id string This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.
--num-workers int Number of worker nodes that this cluster should have.
--policy-id string The ID of the cluster policy used to create the cluster if applicable.
--runtime-engine RuntimeEngine Determines the cluster's runtime engine, either standard or Photon. Supported values: [NULL, PHOTON, STANDARD]
--single-user-name string Single user name if data_security_mode is SINGLE_USER.
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
--use-ml-runtime This field can only be used with kind.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters events --help
List cluster activity events.
Retrieves a list of events about the activity of a cluster. This API is
paginated. If there are more events to read, the response includes all the
parameters necessary to request the next page of events.
Arguments:
CLUSTER_ID: The ID of the cluster to retrieve events about.
Usage:
databricks clusters events CLUSTER_ID [flags]
Flags:
--end-time int The end time in epoch milliseconds.
-h, --help help for events
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--limit int The maximum number of events to include in a page of events.
--offset int The offset in the result set.
--order GetEventsOrder The order to list events in; either "ASC" or "DESC". Supported values: [ASC, DESC]
--start-time int The start time in epoch milliseconds.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
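For instance, fetching the ten most recent events for a cluster (the cluster ID is illustrative):
$CLI clusters events 0123-456789-abcdefgh --order DESC --limit 10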
>>> $CLI clusters get --help
Get cluster info.
Retrieves the information for a cluster given its identifier. Clusters can be
described while they are running, or up to 60 days after they are terminated.
Arguments:
CLUSTER_ID: The cluster about which to retrieve information.
Usage:
databricks clusters get CLUSTER_ID [flags]
Flags:
-h, --help help for get
--no-wait do not wait to reach RUNNING state
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters get-permission-levels --help
Get cluster permission levels.
Gets the permission levels that a user can have on an object.
Arguments:
CLUSTER_ID: The cluster for which to get or manage permissions.
Usage:
databricks clusters get-permission-levels CLUSTER_ID [flags]
Flags:
-h, --help help for get-permission-levels
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters get-permissions --help
Get cluster permissions.
Gets the permissions of a cluster. Clusters can inherit permissions from their
root object.
Arguments:
CLUSTER_ID: The cluster for which to get or manage permissions.
Usage:
databricks clusters get-permissions CLUSTER_ID [flags]
Flags:
-h, --help help for get-permissions
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters list --help
List clusters.
Return information about all pinned and active clusters, and all clusters
terminated within the last 30 days. Clusters terminated prior to this period
are not included.
Usage:
databricks clusters list [flags]
Flags:
--cluster-sources []string Filter clusters by source
--cluster-states []string Filter clusters by states
-h, --help help for list
--is-pinned Filter clusters by pinned status
--page-size int Use this field to specify the maximum number of results to be returned by the server.
--page-token string Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of clusters respectively.
--policy-id string Filter clusters by policy id
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
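A filtered listing might look like this; the flag values are illustrative:
$CLI clusters list --cluster-states RUNNING --is-pinned --page-size 50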
>>> $CLI clusters list-node-types --help
List node types.
Returns a list of supported Spark node types. These node types can be used to
launch a cluster.
Usage:
databricks clusters list-node-types [flags]
Flags:
-h, --help help for list-node-types
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters list-zones --help
List availability zones.
Returns a list of availability zones where clusters can be created (for
example, us-west-2a). These zones can be used to launch a cluster.
Usage:
databricks clusters list-zones [flags]
Flags:
-h, --help help for list-zones
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters permanent-delete --help
Permanently delete cluster.
Permanently deletes a Spark cluster. This cluster is terminated and resources
are asynchronously removed.
In addition, users will no longer see permanently deleted clusters in the
cluster list, and API users can no longer perform any action on permanently
deleted clusters.
Arguments:
CLUSTER_ID: The cluster to be deleted.
Usage:
databricks clusters permanent-delete CLUSTER_ID [flags]
Flags:
-h, --help help for permanent-delete
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters pin --help
Pin cluster.
Pinning a cluster ensures that the cluster will always be returned by the
ListClusters API. Pinning a cluster that is already pinned will have no
effect. This API can only be called by workspace admins.
Arguments:
CLUSTER_ID: <needs content added>
Usage:
databricks clusters pin CLUSTER_ID [flags]
Flags:
-h, --help help for pin
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters resize --help
Resize cluster.
Resizes a cluster to have a desired number of workers. This will fail unless
the cluster is in a RUNNING state.
Arguments:
CLUSTER_ID: The cluster to be resized.
Usage:
databricks clusters resize CLUSTER_ID [flags]
Flags:
-h, --help help for resize
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--no-wait do not wait to reach RUNNING state
--num-workers int Number of worker nodes that this cluster should have.
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters restart --help
Restart cluster.
Restarts a Spark cluster with the supplied ID. If the cluster is not currently
in a RUNNING state, nothing will happen.
Arguments:
CLUSTER_ID: The cluster to be started.
Usage:
databricks clusters restart CLUSTER_ID [flags]
Flags:
-h, --help help for restart
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--no-wait do not wait to reach RUNNING state
--restart-user string <needs content added>.
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters set-permissions --help
Set cluster permissions.
Sets permissions on an object, replacing existing permissions if they exist.
Deletes all direct permissions if none are specified. Objects can inherit
permissions from their root object.
Arguments:
CLUSTER_ID: The cluster for which to get or manage permissions.
Usage:
databricks clusters set-permissions CLUSTER_ID [flags]
Flags:
-h, --help help for set-permissions
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters spark-versions --help
List available Spark versions.
Returns the list of available Spark versions. These versions can be used to
launch a cluster.
Usage:
databricks clusters spark-versions [flags]
Flags:
-h, --help help for spark-versions
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters start --help
Start terminated cluster.
Starts a terminated Spark cluster with the supplied ID. This works similarly to
createCluster except:
* The previous cluster id and attributes are preserved.
* The cluster starts with the last specified cluster size.
* If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum number of nodes.
* If the cluster is not currently in a TERMINATED state, nothing will happen.
* Clusters launched to run a job cannot be started.
Arguments:
CLUSTER_ID: The cluster to be started.
Usage:
databricks clusters start CLUSTER_ID [flags]
Flags:
-h, --help help for start
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--no-wait do not wait to reach RUNNING state
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters unpin --help
Unpin cluster.
Unpinning a cluster will allow the cluster to eventually be removed from the
ListClusters API. Unpinning a cluster that is not pinned will have no effect.
This API can only be called by workspace admins.
Arguments:
CLUSTER_ID: <needs content added>
Usage:
databricks clusters unpin CLUSTER_ID [flags]
Flags:
-h, --help help for unpin
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
>>> $CLI clusters update --help
Update cluster configuration (partial).
Updates the configuration of a cluster to match the partial set of attributes
and size. Denote which fields to update using the update_mask field in the
request body. A cluster can be updated if it is in a RUNNING or TERMINATED
state. If a cluster is updated while in a RUNNING state, it will be
restarted so that the new attributes can take effect. If a cluster is updated
while in a TERMINATED state, it will remain TERMINATED. The updated
attributes will take effect the next time the cluster is started using the
clusters/start API. Attempts to update a cluster in any other state will be
rejected with an INVALID_STATE error code. Clusters created by the
Databricks Jobs service cannot be updated.
Arguments:
CLUSTER_ID: ID of the cluster.
UPDATE_MASK: Specifies which fields of the cluster will be updated. This is required in
the POST request. The update mask should be supplied as a single string.
To specify multiple fields, separate them with commas (no spaces). To
delete a field from a cluster configuration, add it to the update_mask
string but omit it from the cluster object.
Usage:
databricks clusters update CLUSTER_ID UPDATE_MASK [flags]
Flags:
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
--no-wait do not wait to reach RUNNING state
--timeout duration maximum amount of time to reach RUNNING state (default 20m0s)
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
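A hedged sketch of a partial update: the positional update_mask names the field, and the new value is assumed to be passed under the cluster key of the JSON body (IDs and values are illustrative):
$CLI clusters update 0123-456789-abcdefgh autotermination_minutes --json '{"cluster": {"autotermination_minutes": 90}}'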
>>> $CLI clusters update-permissions --help
Update cluster permissions.
Updates the permissions on a cluster. Clusters can inherit permissions from
their root object.
Arguments:
CLUSTER_ID: The cluster for which to get or manage permissions.
Usage:
databricks clusters update-permissions CLUSTER_ID [flags]
Flags:
-h, --help help for update-permissions
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

@ -0,0 +1,22 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI clusters change-owner --help
trace $CLI clusters create --help
trace $CLI clusters delete --help
trace $CLI clusters edit --help
trace $CLI clusters events --help
trace $CLI clusters get --help
trace $CLI clusters get-permission-levels --help
trace $CLI clusters get-permissions --help
trace $CLI clusters list --help
trace $CLI clusters list-node-types --help
trace $CLI clusters list-zones --help
trace $CLI clusters permanent-delete --help
trace $CLI clusters pin --help
trace $CLI clusters resize --help
trace $CLI clusters restart --help
trace $CLI clusters set-permissions --help
trace $CLI clusters spark-versions --help
trace $CLI clusters start --help
trace $CLI clusters unpin --help
trace $CLI clusters update --help
trace $CLI clusters update-permissions --help

@ -0,0 +1,5 @@
>>> $CLI command-execution cancel --help
Error: unknown command "command-execution" for "databricks"
Exit code: 1

@ -0,0 +1,7 @@
# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
trace $CLI command-execution cancel --help
trace $CLI command-execution command-status --help
trace $CLI command-execution context-status --help
trace $CLI command-execution create --help
trace $CLI command-execution destroy --help
trace $CLI command-execution execute --help

@ -0,0 +1,5 @@
>>> $CLI compliance-security-profile get --help
Error: unknown command "compliance-security-profile" for "databricks"
Exit code: 1
