Upgrade Go SDK to 0.44.0 (#1679)

## Changes
Upgrade Go SDK to 0.44.0.

Highlights from the regenerated code in this diff:

* New generated commands: account-level `usage-dashboards`, and workspace-level `alerts-legacy`, `genie`, `notification-destinations`, `queries-legacy`, and `query-visualizations-legacy`.
* The account `budgets` service now works with budget configurations (`CreateBudgetConfigurationRequest` and friends), and the command group is no longer hidden.
* Alert and query name lookups are renamed from `GetByName` to `GetByDisplayName`.
* `ListPipelineEventsAll` replaces the `Impl().ListPipelineEvents` workaround in the pipeline runner and update tracker.
* Model serving external models gain paired `*_plaintext` API-key fields and a Google Cloud Vertex AI provider.
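For consumers building against the CLI, the SDK bump itself is a one-line `go.mod` change (shown as an illustration; the rest of this diff is regenerated code and description updates):

```
require github.com/databricks/databricks-sdk-go v0.44.0
```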

---------

Co-authored-by: Pieter Noordhuis <pieter.noordhuis@databricks.com>
Author: Andrew Nester
Date: 2024-08-15 15:23:07 +02:00 (committed via GitHub)
Parent: a6eb673d55
Commit: 54799a1918
60 changed files with 3251 additions and 609 deletions


@@ -1 +1 @@
7437dabb9dadee402c1fc060df4c1ce8cc5369f0
f98c07f9c71f579de65d2587bb0292f83d10e55d

.gitattributes (vendored, 6 changes)

@@ -24,10 +24,12 @@ cmd/account/service-principals/service-principals.go linguist-generated=true
cmd/account/settings/settings.go linguist-generated=true
cmd/account/storage-credentials/storage-credentials.go linguist-generated=true
cmd/account/storage/storage.go linguist-generated=true
cmd/account/usage-dashboards/usage-dashboards.go linguist-generated=true
cmd/account/users/users.go linguist-generated=true
cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
cmd/account/workspaces/workspaces.go linguist-generated=true
cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true
cmd/workspace/alerts/alerts.go linguist-generated=true
cmd/workspace/apps/apps.go linguist-generated=true
cmd/workspace/artifact-allowlists/artifact-allowlists.go linguist-generated=true
@@ -54,6 +56,7 @@ cmd/workspace/enhanced-security-monitoring/enhanced-security-monitoring.go linguist-generated=true
cmd/workspace/experiments/experiments.go linguist-generated=true
cmd/workspace/external-locations/external-locations.go linguist-generated=true
cmd/workspace/functions/functions.go linguist-generated=true
cmd/workspace/genie/genie.go linguist-generated=true
cmd/workspace/git-credentials/git-credentials.go linguist-generated=true
cmd/workspace/global-init-scripts/global-init-scripts.go linguist-generated=true
cmd/workspace/grants/grants.go linguist-generated=true
@@ -67,6 +70,7 @@ cmd/workspace/libraries/libraries.go linguist-generated=true
cmd/workspace/metastores/metastores.go linguist-generated=true
cmd/workspace/model-registry/model-registry.go linguist-generated=true
cmd/workspace/model-versions/model-versions.go linguist-generated=true
cmd/workspace/notification-destinations/notification-destinations.go linguist-generated=true
cmd/workspace/online-tables/online-tables.go linguist-generated=true
cmd/workspace/permission-migration/permission-migration.go linguist-generated=true
cmd/workspace/permissions/permissions.go linguist-generated=true
@@ -81,8 +85,10 @@ cmd/workspace/provider-provider-analytics-dashboards/provider-provider-analytics
cmd/workspace/provider-providers/provider-providers.go linguist-generated=true
cmd/workspace/providers/providers.go linguist-generated=true
cmd/workspace/quality-monitors/quality-monitors.go linguist-generated=true
cmd/workspace/queries-legacy/queries-legacy.go linguist-generated=true
cmd/workspace/queries/queries.go linguist-generated=true
cmd/workspace/query-history/query-history.go linguist-generated=true
cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go linguist-generated=true
cmd/workspace/query-visualizations/query-visualizations.go linguist-generated=true
cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true
cmd/workspace/recipients/recipients.go linguist-generated=true


@@ -220,7 +220,7 @@ type resolvers struct {
func allResolvers() *resolvers {
r := &resolvers{}
r.Alert = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
entity, err := w.Alerts.GetByName(ctx, name)
entity, err := w.Alerts.GetByDisplayName(ctx, name)
if err != nil {
return "", err
}
@@ -284,7 +284,7 @@ func allResolvers() *resolvers {
return fmt.Sprint(entity.PipelineId), nil
}
r.Query = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
entity, err := w.Queries.GetByName(ctx, name)
entity, err := w.Queries.GetByDisplayName(ctx, name)
if err != nil {
return "", err
}
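For code built on the SDK, the rename is mechanical. A minimal sketch, assuming a workspace client configured via the environment and using `Id` as the resolved identifier field (the alert name is a placeholder):

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
)

func main() {
	ctx := context.Background()
	w := databricks.Must(databricks.NewWorkspaceClient())

	// SDK 0.44.0 renames GetByName to GetByDisplayName for alerts and queries.
	alert, err := w.Alerts.GetByDisplayName(ctx, "my-alert")
	if err != nil {
		panic(err)
	}
	fmt.Println(alert.Id)
}
```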


@@ -53,7 +53,7 @@ func (r *pipelineRunner) logErrorEvent(ctx context.Context, pipelineId string, u
// Otherwise for long lived pipelines, there can be a lot of unnecessary
// latency due to multiple pagination API calls needed underneath the hood for
// ListPipelineEventsAll
res, err := w.Pipelines.Impl().ListPipelineEvents(ctx, pipelines.ListPipelineEventsRequest{
events, err := w.Pipelines.ListPipelineEventsAll(ctx, pipelines.ListPipelineEventsRequest{
Filter: `level='ERROR'`,
MaxResults: 100,
PipelineId: pipelineId,
@@ -61,7 +61,7 @@ func (r *pipelineRunner) logErrorEvent(ctx context.Context, pipelineId string, u
if err != nil {
return err
}
updateEvents := filterEventsByUpdateId(res.Events, updateId)
updateEvents := filterEventsByUpdateId(events, updateId)
// The events API returns most recent events first. We iterate in a reverse order
// to print the events chronologically
for i := len(updateEvents) - 1; i >= 0; i-- {

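The same swap appears in the update tracker below. `ListPipelineEventsAll` paginates internally and returns the event slice directly, so there is no response wrapper to unwrap. A sketch under that assumption; printing `Message` is illustrative:

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
)

func logPipelineErrors(ctx context.Context, w *databricks.WorkspaceClient, pipelineId string) error {
	// Returns []pipelines.PipelineEvent directly; pagination happens inside.
	events, err := w.Pipelines.ListPipelineEventsAll(ctx, pipelines.ListPipelineEventsRequest{
		Filter:     `level='ERROR'`,
		MaxResults: 100,
		PipelineId: pipelineId,
	})
	if err != nil {
		return err
	}
	// Most recent events come first; walk backwards to print chronologically.
	for i := len(events) - 1; i >= 0; i-- {
		fmt.Println(events[i].Message)
	}
	return nil
}
```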

@@ -78,7 +78,7 @@ func (l *UpdateTracker) Events(ctx context.Context) ([]ProgressEvent, error) {
}
// we only check the most recent 100 events for progress
response, err := l.w.Pipelines.Impl().ListPipelineEvents(ctx, pipelines.ListPipelineEventsRequest{
events, err := l.w.Pipelines.ListPipelineEventsAll(ctx, pipelines.ListPipelineEventsRequest{
PipelineId: l.PipelineId,
MaxResults: 100,
Filter: filter,
@@ -89,8 +89,8 @@ func (l *UpdateTracker) Events(ctx context.Context) ([]ProgressEvent, error) {
result := make([]ProgressEvent, 0)
// we iterate in reverse to return events in chronological order
for i := len(response.Events) - 1; i >= 0; i-- {
event := response.Events[i]
for i := len(events) - 1; i >= 0; i-- {
event := events[i]
// filter to only include update_progress and flow_progress events
if event.EventType == "flow_progress" || event.EventType == "update_progress" {
result = append(result, ProgressEvent(event))


@@ -218,7 +218,7 @@
}
},
"description": {
"description": "An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding."
"description": "An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding."
},
"edit_mode": {
"description": "Edit mode of the job.\n\n* `UI_LOCKED`: The job is in a locked UI state and cannot be modified.\n* `EDITABLE`: The job is in an editable state and can be modified."
@@ -935,7 +935,7 @@
}
},
"egg": {
"description": "URI of the egg library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"egg\": \"/Workspace/path/to/library.egg\" }`, `{ \"egg\" : \"/Volumes/path/to/library.egg\" }` or\n`{ \"egg\": \"s3://my-bucket/library.egg\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI."
"description": "Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above."
},
"jar": {
"description": "URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"jar\": \"/Workspace/path/to/library.jar\" }`, `{ \"jar\" : \"/Volumes/path/to/library.jar\" }` or\n`{ \"jar\": \"s3://my-bucket/library.jar\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI."
@@ -1827,13 +1827,16 @@
}
},
"external_model": {
"description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. When an external_model is present, the served\nentities list can only have one served_entity object. For an existing endpoint with external_model, it can not be updated to an endpoint without external_model.\nIf the endpoint is created without external_model, users cannot update it to add external_model later.\n",
"description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,\nit cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.\nThe task type of all external models within an endpoint must be the same.\n",
"properties": {
"ai21labs_config": {
"description": "AI21Labs Config. Only required if the provider is 'ai21labs'.",
"properties": {
"ai21labs_api_key": {
"description": "The Databricks secret key reference for an AI21Labs API key."
"description": "The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`."
},
"ai21labs_api_key_plaintext": {
"description": "An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`."
}
}
},
@@ -1841,13 +1844,19 @@
"description": "Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.",
"properties": {
"aws_access_key_id": {
"description": "The Databricks secret key reference for an AWS Access Key ID with permissions to interact with Bedrock services."
"description": "The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`."
},
"aws_access_key_id_plaintext": {
"description": "An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`."
},
"aws_region": {
"description": "The AWS region to use. Bedrock has to be enabled there."
},
"aws_secret_access_key": {
"description": "The Databricks secret key reference for an AWS Secret Access Key paired with the access key ID, with permissions to interact with Bedrock services."
"description": "The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`."
},
"aws_secret_access_key_plaintext": {
"description": "An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`."
},
"bedrock_provider": {
"description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon."
@@ -1858,15 +1867,24 @@
"description": "Anthropic Config. Only required if the provider is 'anthropic'.",
"properties": {
"anthropic_api_key": {
"description": "The Databricks secret key reference for an Anthropic API key."
"description": "The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`."
},
"anthropic_api_key_plaintext": {
"description": "The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`."
}
}
},
"cohere_config": {
"description": "Cohere Config. Only required if the provider is 'cohere'.",
"properties": {
"cohere_api_base": {
"description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n"
},
"cohere_api_key": {
"description": "The Databricks secret key reference for a Cohere API key."
"description": "The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`."
},
"cohere_api_key_plaintext": {
"description": "The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`."
}
}
},
@@ -1874,13 +1892,33 @@
"description": "Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.",
"properties": {
"databricks_api_token": {
"description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\n"
"description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\nIf you prefer to paste your API key directly, see `databricks_api_token_plaintext`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n"
},
"databricks_api_token_plaintext": {
"description": "The Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n"
},
"databricks_workspace_url": {
"description": "The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.\n"
}
}
},
"google_cloud_vertex_ai_config": {
"description": "Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.",
"properties": {
"private_key": {
"description": "The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`"
},
"private_key_plaintext": {
"description": "The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`."
},
"project_id": {
"description": "This is the Google Cloud project id that the service account is associated with."
},
"region": {
"description": "This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions."
}
}
},
"name": {
"description": "The name of the external model."
},
@@ -1891,16 +1929,22 @@
"description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.\n"
},
"microsoft_entra_client_secret": {
"description": "The Databricks secret key reference for the Microsoft Entra Client Secret that is\nonly required for Azure AD OpenAI.\n"
"description": "The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.\nIf you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n"
},
"microsoft_entra_client_secret_plaintext": {
"description": "The client secret used for Microsoft Entra ID authentication provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n"
},
"microsoft_entra_tenant_id": {
"description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.\n"
},
"openai_api_base": {
"description": "This is the base URL for the OpenAI API (default: \"https://api.openai.com/v1\").\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\n"
"description": "This is a field to provide a customized base URl for the OpenAI API.\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\nFor other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.\n"
},
"openai_api_key": {
"description": "The Databricks secret key reference for an OpenAI or Azure OpenAI API key."
"description": "The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`."
},
"openai_api_key_plaintext": {
"description": "The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`."
},
"openai_api_type": {
"description": "This is an optional field to specify the type of OpenAI API to use.\nFor Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security\naccess validation protocol. For access token validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.\n"
@@ -1920,12 +1964,15 @@
"description": "PaLM Config. Only required if the provider is 'palm'.",
"properties": {
"palm_api_key": {
"description": "The Databricks secret key reference for a PaLM API key."
"description": "The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`."
},
"palm_api_key_plaintext": {
"description": "The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`."
}
}
},
"provider": {
"description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and 'palm'.\",\n"
"description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n"
},
"task": {
"description": "The task type of the external model."
@@ -2331,6 +2378,9 @@
"driver_node_type_id": {
"description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above."
},
"enable_local_disk_encryption": {
"description": "Whether to enable local disk encryption for the cluster."
},
"gcp_attributes": {
"description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.",
"properties": {
@@ -2525,7 +2575,7 @@
"description": "Required, Immutable. The name of the catalog for the gateway pipeline's storage location."
},
"gateway_storage_name": {
"description": "Required. The Unity Catalog-compatible naming for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nDelta Live Tables system will automatically create the storage location under the catalog and schema.\n"
"description": "Optional. The Unity Catalog-compatible name for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nDelta Live Tables system will automatically create the storage location under the catalog and schema.\n"
},
"gateway_storage_schema": {
"description": "Required, Immutable. The name of the schema for the gateway pipelines's storage location."
@@ -2565,7 +2615,7 @@
"description": "Required. Schema name in the source database."
},
"table_configuration": {
"description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the ManagedIngestionPipelineDefinition object.",
"description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the IngestionPipelineDefinition object.",
"properties": {
"primary_keys": {
"description": "The primary key of the table used to apply changes.",
@@ -2605,7 +2655,7 @@
"description": "Required. Table name in the source database."
},
"table_configuration": {
"description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the ManagedIngestionPipelineDefinition object and the SchemaSpec.",
"description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.",
"properties": {
"primary_keys": {
"description": "The primary key of the table used to apply changes.",
@@ -2685,6 +2735,9 @@
"description": "The absolute path of the notebook."
}
}
},
"whl": {
"description": "URI of the whl to be installed."
}
}
}
@@ -2955,6 +3008,49 @@
}
}
}
},
"schemas": {
"description": "",
"additionalproperties": {
"description": "",
"properties": {
"catalog_name": {
"description": ""
},
"comment": {
"description": ""
},
"grants": {
"description": "",
"items": {
"description": "",
"properties": {
"principal": {
"description": ""
},
"privileges": {
"description": "",
"items": {
"description": ""
}
}
}
}
},
"name": {
"description": ""
},
"properties": {
"description": "",
"additionalproperties": {
"description": ""
}
},
"storage_root": {
"description": ""
}
}
}
}
}
},
@@ -3194,7 +3290,7 @@
}
},
"description": {
"description": "An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding."
"description": "An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding."
},
"edit_mode": {
"description": "Edit mode of the job.\n\n* `UI_LOCKED`: The job is in a locked UI state and cannot be modified.\n* `EDITABLE`: The job is in an editable state and can be modified."
@@ -3911,7 +4007,7 @@
}
},
"egg": {
"description": "URI of the egg library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"egg\": \"/Workspace/path/to/library.egg\" }`, `{ \"egg\" : \"/Volumes/path/to/library.egg\" }` or\n`{ \"egg\": \"s3://my-bucket/library.egg\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI."
"description": "Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above."
},
"jar": {
"description": "URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"jar\": \"/Workspace/path/to/library.jar\" }`, `{ \"jar\" : \"/Volumes/path/to/library.jar\" }` or\n`{ \"jar\": \"s3://my-bucket/library.jar\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI."
@@ -4803,13 +4899,16 @@
}
},
"external_model": {
"description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. When an external_model is present, the served\nentities list can only have one served_entity object. For an existing endpoint with external_model, it can not be updated to an endpoint without external_model.\nIf the endpoint is created without external_model, users cannot update it to add external_model later.\n",
"description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,\nit cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.\nThe task type of all external models within an endpoint must be the same.\n",
"properties": {
"ai21labs_config": {
"description": "AI21Labs Config. Only required if the provider is 'ai21labs'.",
"properties": {
"ai21labs_api_key": {
"description": "The Databricks secret key reference for an AI21Labs API key."
"description": "The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`."
},
"ai21labs_api_key_plaintext": {
"description": "An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`."
}
}
},
@@ -4817,13 +4916,19 @@
"description": "Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.",
"properties": {
"aws_access_key_id": {
"description": "The Databricks secret key reference for an AWS Access Key ID with permissions to interact with Bedrock services."
"description": "The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`."
},
"aws_access_key_id_plaintext": {
"description": "An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`."
},
"aws_region": {
"description": "The AWS region to use. Bedrock has to be enabled there."
},
"aws_secret_access_key": {
"description": "The Databricks secret key reference for an AWS Secret Access Key paired with the access key ID, with permissions to interact with Bedrock services."
"description": "The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`."
},
"aws_secret_access_key_plaintext": {
"description": "An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`."
},
"bedrock_provider": {
"description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon."
@@ -4834,15 +4939,24 @@
"description": "Anthropic Config. Only required if the provider is 'anthropic'.",
"properties": {
"anthropic_api_key": {
"description": "The Databricks secret key reference for an Anthropic API key."
"description": "The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`."
},
"anthropic_api_key_plaintext": {
"description": "The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`."
}
}
},
"cohere_config": {
"description": "Cohere Config. Only required if the provider is 'cohere'.",
"properties": {
"cohere_api_base": {
"description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n"
},
"cohere_api_key": {
"description": "The Databricks secret key reference for a Cohere API key."
"description": "The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`."
},
"cohere_api_key_plaintext": {
"description": "The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`."
}
}
},
@@ -4850,13 +4964,33 @@
"description": "Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.",
"properties": {
"databricks_api_token": {
"description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\n"
"description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\nIf you prefer to paste your API key directly, see `databricks_api_token_plaintext`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n"
},
"databricks_api_token_plaintext": {
"description": "The Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n"
},
"databricks_workspace_url": {
"description": "The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.\n"
}
}
},
"google_cloud_vertex_ai_config": {
"description": "Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.",
"properties": {
"private_key": {
"description": "The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`"
},
"private_key_plaintext": {
"description": "The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`."
},
"project_id": {
"description": "This is the Google Cloud project id that the service account is associated with."
},
"region": {
"description": "This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions."
}
}
},
"name": {
"description": "The name of the external model."
},
@@ -4867,16 +5001,22 @@
"description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.\n"
},
"microsoft_entra_client_secret": {
"description": "The Databricks secret key reference for the Microsoft Entra Client Secret that is\nonly required for Azure AD OpenAI.\n"
"description": "The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.\nIf you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n"
},
"microsoft_entra_client_secret_plaintext": {
"description": "The client secret used for Microsoft Entra ID authentication provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n"
},
"microsoft_entra_tenant_id": {
"description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.\n"
},
"openai_api_base": {
"description": "This is the base URL for the OpenAI API (default: \"https://api.openai.com/v1\").\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\n"
"description": "This is a field to provide a customized base URl for the OpenAI API.\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\nFor other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.\n"
},
"openai_api_key": {
"description": "The Databricks secret key reference for an OpenAI or Azure OpenAI API key."
"description": "The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`."
},
"openai_api_key_plaintext": {
"description": "The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`."
},
"openai_api_type": {
"description": "This is an optional field to specify the type of OpenAI API to use.\nFor Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security\naccess validation protocol. For access token validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.\n"
@@ -4896,12 +5036,15 @@
"description": "PaLM Config. Only required if the provider is 'palm'.",
"properties": {
"palm_api_key": {
"description": "The Databricks secret key reference for a PaLM API key."
"description": "The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`."
},
"palm_api_key_plaintext": {
"description": "The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`."
}
}
},
"provider": {
"description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and 'palm'.\",\n"
"description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n"
},
"task": {
"description": "The task type of the external model."
@@ -5307,6 +5450,9 @@
"driver_node_type_id": {
"description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above."
},
"enable_local_disk_encryption": {
"description": "Whether to enable local disk encryption for the cluster."
},
"gcp_attributes": {
"description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.",
"properties": {
@@ -5501,7 +5647,7 @@
"description": "Required, Immutable. The name of the catalog for the gateway pipeline's storage location."
},
"gateway_storage_name": {
"description": "Required. The Unity Catalog-compatible naming for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nDelta Live Tables system will automatically create the storage location under the catalog and schema.\n"
"description": "Optional. The Unity Catalog-compatible name for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nDelta Live Tables system will automatically create the storage location under the catalog and schema.\n"
},
"gateway_storage_schema": {
"description": "Required, Immutable. The name of the schema for the gateway pipelines's storage location."
@@ -5541,7 +5687,7 @@
"description": "Required. Schema name in the source database."
},
"table_configuration": {
"description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the ManagedIngestionPipelineDefinition object.",
"description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the IngestionPipelineDefinition object.",
"properties": {
"primary_keys": {
"description": "The primary key of the table used to apply changes.",
@@ -5581,7 +5727,7 @@
"description": "Required. Table name in the source database."
},
"table_configuration": {
"description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the ManagedIngestionPipelineDefinition object and the SchemaSpec.",
"description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.",
"properties": {
"primary_keys": {
"description": "The primary key of the table used to apply changes.",
@@ -5661,6 +5807,9 @@
"description": "The absolute path of the notebook."
}
}
},
"whl": {
"description": "URI of the whl to be installed."
}
}
}
@@ -5931,6 +6080,49 @@
}
}
}
},
"schemas": {
"description": "",
"additionalproperties": {
"description": "",
"properties": {
"catalog_name": {
"description": ""
},
"comment": {
"description": ""
},
"grants": {
"description": "",
"items": {
"description": "",
"properties": {
"principal": {
"description": ""
},
"privileges": {
"description": "",
"items": {
"description": ""
}
}
}
}
},
"name": {
"description": ""
},
"properties": {
"description": "",
"additionalproperties": {
"description": ""
}
},
"storage_root": {
"description": ""
}
}
}
}
}
},
@@ -6010,6 +6202,9 @@
"description": ""
}
}
},
"type": {
"description": ""
}
}
}
@@ -6115,6 +6310,9 @@
"description": ""
}
}
},
"type": {
"description": ""
}
}
}
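Beyond wording, the serving-endpoint descriptions above document two API-key styles per provider: a Databricks secret reference or a `*_plaintext` inline value, plus the new `google_cloud_vertex_ai_config` provider block. A hedged sketch of creating such an endpoint through the Go SDK; the `serving` type and field names are assumptions inferred from the schema, not verified signatures, and all literals are placeholders:

```go
package main

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/serving"
)

func main() {
	ctx := context.Background()
	w := databricks.Must(databricks.NewWorkspaceClient())

	// Prefer the secret reference over the *_plaintext variant outside of tests.
	_, err := w.ServingEndpoints.Create(ctx, serving.CreateServingEndpoint{
		Name: "my-openai-endpoint", // placeholder
		Config: serving.EndpointCoreConfigInput{
			ServedEntities: []serving.ServedEntityInput{{
				Name: "external-gpt",
				ExternalModel: &serving.ExternalModel{
					Provider: "openai",
					Task:     "llm/v1/chat",
					Name:     "gpt-4o", // placeholder model name
					OpenaiConfig: &serving.OpenAiConfig{
						// Secret reference; openai_api_key_plaintext is the inline alternative.
						OpenaiApiKey: "{{secrets/my-scope/openai-key}}",
					},
				},
			}},
		},
	})
	if err != nil {
		panic(err)
	}
}
```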


@@ -19,16 +19,15 @@ var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "budgets",
Short: `These APIs manage budget configuration including notifications for exceeding a budget for a period.`,
Long: `These APIs manage budget configuration including notifications for exceeding a
budget for a period. They can also retrieve the status of each budget.`,
Short: `These APIs manage budget configurations for this account.`,
Long: `These APIs manage budget configurations for this account. Budgets enable you
to monitor usage across your account. You can set up budgets to either track
account-wide spending, or apply filters to track the spending of specific
teams, projects, or workspaces.`,
GroupID: "billing",
Annotations: map[string]string{
"package": "billing",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods
@@ -52,23 +51,24 @@ func New() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*billing.WrappedBudget,
*billing.CreateBudgetConfigurationRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq billing.WrappedBudget
var createReq billing.CreateBudgetConfigurationRequest
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "create"
cmd.Short = `Create a new budget.`
cmd.Long = `Create a new budget.
cmd.Short = `Create new budget.`
cmd.Long = `Create new budget.
Creates a new budget in the specified account.`
Create a new budget configuration for an account. For full details, see
https://docs.databricks.com/en/admin/account-settings/budgets.html.`
cmd.Annotations = make(map[string]string)
@@ -111,13 +111,13 @@ func newCreate() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*billing.DeleteBudgetRequest,
*billing.DeleteBudgetConfigurationRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq billing.DeleteBudgetRequest
var deleteReq billing.DeleteBudgetConfigurationRequest
// TODO: short flags
@@ -125,35 +125,24 @@ func newDelete() *cobra.Command {
cmd.Short = `Delete budget.`
cmd.Long = `Delete budget.
Deletes the budget specified by its UUID.
Deletes a budget configuration for an account. Both account and budget
configuration are specified by ID. This cannot be undone.
Arguments:
BUDGET_ID: Budget ID`
BUDGET_ID: The Databricks budget configuration ID.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustAccountClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
a := root.AccountClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No BUDGET_ID argument specified. Loading names for Budgets drop-down."
names, err := a.Budgets.BudgetWithStatusNameToBudgetIdMap(ctx)
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Budgets drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "Budget ID")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have budget id")
}
deleteReq.BudgetId = args[0]
err = a.Budgets.Delete(ctx, deleteReq)
@@ -181,50 +170,38 @@ func newDelete() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var getOverrides []func(
*cobra.Command,
*billing.GetBudgetRequest,
*billing.GetBudgetConfigurationRequest,
)
func newGet() *cobra.Command {
cmd := &cobra.Command{}
var getReq billing.GetBudgetRequest
var getReq billing.GetBudgetConfigurationRequest
// TODO: short flags
cmd.Use = "get BUDGET_ID"
cmd.Short = `Get budget and its status.`
cmd.Long = `Get budget and its status.
cmd.Short = `Get budget.`
cmd.Long = `Get budget.
Gets the budget specified by its UUID, including noncumulative status for each
day that the budget is configured to include.
Gets a budget configuration for an account. Both account and budget
configuration are specified by ID.
Arguments:
BUDGET_ID: Budget ID`
BUDGET_ID: The Databricks budget configuration ID.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustAccountClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
a := root.AccountClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No BUDGET_ID argument specified. Loading names for Budgets drop-down."
names, err := a.Budgets.BudgetWithStatusNameToBudgetIdMap(ctx)
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Budgets drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "Budget ID")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have budget id")
}
getReq.BudgetId = args[0]
response, err := a.Budgets.Get(ctx, getReq)
@@ -252,25 +229,37 @@ func newGet() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
*billing.ListBudgetConfigurationsRequest,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
var listReq billing.ListBudgetConfigurationsRequest
// TODO: short flags
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A page token received from a previous get all budget configurations call.`)
cmd.Use = "list"
cmd.Short = `Get all budgets.`
cmd.Long = `Get all budgets.
Gets all budgets associated with this account, including noncumulative status
for each day that the budget is configured to include.`
Gets all budgets associated with this account.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustAccountClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
a := root.AccountClient(ctx)
response := a.Budgets.List(ctx)
response := a.Budgets.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
}
@@ -280,7 +269,7 @@ func newList() *cobra.Command {
// Apply optional overrides to this command.
for _, fn := range listOverrides {
fn(cmd)
fn(cmd, &listReq)
}
return cmd
@@ -292,13 +281,13 @@ func newList() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*billing.WrappedBudget,
*billing.UpdateBudgetConfigurationRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq billing.WrappedBudget
var updateReq billing.UpdateBudgetConfigurationRequest
var updateJson flags.JsonFlag
// TODO: short flags
@@ -308,11 +297,11 @@ func newUpdate() *cobra.Command {
cmd.Short = `Modify budget.`
cmd.Long = `Modify budget.
Modifies a budget in this account. Budget properties are completely
overwritten.
Updates a budget configuration for an account. Both account and budget
configuration are specified by ID.
Arguments:
BUDGET_ID: Budget ID`
BUDGET_ID: The Databricks budget configuration ID.`
cmd.Annotations = make(map[string]string)
@@ -336,11 +325,11 @@
}
updateReq.BudgetId = args[0]
err = a.Budgets.Update(ctx, updateReq)
response, err := a.Budgets.Update(ctx, updateReq)
if err != nil {
return err
}
return nil
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
@@ -355,4 +344,4 @@ func newUpdate() *cobra.Command {
return cmd
}
// end service Budgets
// end service budgets
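In SDK terms, `WrappedBudget` and the old `*BudgetRequest` types are replaced by `*BudgetConfiguration*` request types, `List` now accepts a request with a page token, and `Update` returns the updated configuration. A minimal sketch against an account client, with a placeholder ID:

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/billing"
)

func main() {
	ctx := context.Background()
	a := databricks.Must(databricks.NewAccountClient())

	// Both account and budget configuration are addressed by ID.
	budget, err := a.Budgets.Get(ctx, billing.GetBudgetConfigurationRequest{
		BudgetId: "budget-configuration-id", // placeholder
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", budget)
}
```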

cmd/account/cmd.go (generated, 4 changes)

@@ -26,6 +26,7 @@ import (
account_settings "github.com/databricks/cli/cmd/account/settings"
storage "github.com/databricks/cli/cmd/account/storage"
account_storage_credentials "github.com/databricks/cli/cmd/account/storage-credentials"
usage_dashboards "github.com/databricks/cli/cmd/account/usage-dashboards"
account_users "github.com/databricks/cli/cmd/account/users"
vpc_endpoints "github.com/databricks/cli/cmd/account/vpc-endpoints"
workspace_assignment "github.com/databricks/cli/cmd/account/workspace-assignment"
@@ -40,7 +41,6 @@ func New() *cobra.Command {
cmd.AddCommand(account_access_control.New())
cmd.AddCommand(billable_usage.New())
cmd.AddCommand(budgets.New())
cmd.AddCommand(credentials.New())
cmd.AddCommand(custom_app_integration.New())
cmd.AddCommand(encryption_keys.New())
@@ -59,10 +59,12 @@ cmd.AddCommand(account_settings.New())
cmd.AddCommand(account_settings.New())
cmd.AddCommand(storage.New())
cmd.AddCommand(account_storage_credentials.New())
cmd.AddCommand(usage_dashboards.New())
cmd.AddCommand(account_users.New())
cmd.AddCommand(vpc_endpoints.New())
cmd.AddCommand(workspace_assignment.New())
cmd.AddCommand(workspaces.New())
cmd.AddCommand(budgets.New())
// Register all groups with the parent command.
groups := Groups()


@@ -3,8 +3,6 @@
package custom_app_integration
import (
"fmt"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
@@ -19,8 +17,8 @@ var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "custom-app-integration",
Short: `These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`,
Long: `These APIs enable administrators to manage custom oauth app integrations,
Short: `These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`,
Long: `These APIs enable administrators to manage custom OAuth app integrations,
which is required for adding/using Custom OAuth App Integration like Tableau
Cloud for Databricks in AWS cloud.`,
GroupID: "oauth2",
@@ -62,7 +60,9 @@ func newCreate() *cobra.Command {
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().BoolVar(&createReq.Confidential, "confidential", createReq.Confidential, `indicates if an oauth client-secret should be generated.`)
cmd.Flags().BoolVar(&createReq.Confidential, "confidential", createReq.Confidential, `This field indicates whether an OAuth client secret is required to authenticate this client.`)
cmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Name of the custom OAuth app.`)
// TODO: array: redirect_urls
// TODO: array: scopes
// TODO: complex arg: token_access_policy
@@ -72,11 +72,16 @@
Create Custom OAuth App Integration.
You can retrieve the custom oauth app integration via
You can retrieve the custom OAuth app integration via
:method:CustomAppIntegration/get.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustAccountClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
@@ -87,8 +92,6 @@ func newCreate() *cobra.Command {
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
response, err := a.CustomAppIntegration.Create(ctx, createReq)
@@ -131,10 +134,7 @@ func newDelete() *cobra.Command {
cmd.Long = `Delete Custom OAuth App Integration.
Delete an existing Custom OAuth App Integration. You can retrieve the custom
oauth app integration via :method:CustomAppIntegration/get.
Arguments:
INTEGRATION_ID: The oauth app integration ID.`
OAuth app integration via :method:CustomAppIntegration/get.`
cmd.Annotations = make(map[string]string)
@@ -189,10 +189,7 @@
cmd.Short = `Get OAuth Custom App Integration.`
cmd.Long = `Get OAuth Custom App Integration.
Gets the Custom OAuth App Integration for the given integration id.
Arguments:
INTEGRATION_ID: The oauth app integration ID.`
Gets the Custom OAuth App Integration for the given integration id.`
cmd.Annotations = make(map[string]string)
@@ -233,25 +230,40 @@
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
*oauth2.ListCustomAppIntegrationsRequest,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
var listReq oauth2.ListCustomAppIntegrationsRequest
// TODO: short flags
cmd.Flags().BoolVar(&listReq.IncludeCreatorUsername, "include-creator-username", listReq.IncludeCreatorUsername, ``)
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, ``)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, ``)
cmd.Use = "list"
cmd.Short = `Get custom oauth app integrations.`
cmd.Long = `Get custom oauth app integrations.
Get the list of custom oauth app integrations for the specified Databricks
Get the list of custom OAuth app integrations for the specified Databricks
account`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustAccountClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
a := root.AccountClient(ctx)
response := a.CustomAppIntegration.List(ctx)
response := a.CustomAppIntegration.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
}
@@ -261,7 +273,7 @@
// Apply optional overrides to this command.
for _, fn := range listOverrides {
fn(cmd)
fn(cmd, &listReq)
}
return cmd
@@ -293,10 +305,7 @@ func newUpdate() *cobra.Command {
cmd.Long = `Updates Custom OAuth App Integration.
Updates an existing custom OAuth App Integration. You can retrieve the custom
oauth app integration via :method:CustomAppIntegration/get.
Arguments:
INTEGRATION_ID: The oauth app integration ID.`
OAuth app integration via :method:CustomAppIntegration/get.`
cmd.Annotations = make(map[string]string)

View File

@ -54,7 +54,7 @@ func newList() *cobra.Command {
// TODO: short flags
cmd.Flags().Int64Var(&listReq.PageSize, "page-size", listReq.PageSize, `The max number of OAuth published apps to return.`)
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `The max number of OAuth published apps to return in one page.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`)
cmd.Use = "list"

View File

@ -17,8 +17,8 @@ var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "published-app-integration",
Short: `These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.`,
Long: `These APIs enable administrators to manage published oauth app integrations,
Short: `These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.`,
Long: `These APIs enable administrators to manage published OAuth app integrations,
which is required for adding/using Published OAuth App Integration like
Tableau Desktop for Databricks in AWS cloud.`,
GroupID: "oauth2",
@ -60,7 +60,7 @@ func newCreate() *cobra.Command {
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&createReq.AppId, "app-id", createReq.AppId, `app_id of the oauth published app integration.`)
cmd.Flags().StringVar(&createReq.AppId, "app-id", createReq.AppId, `App id of the OAuth published app integration.`)
// TODO: complex arg: token_access_policy
cmd.Use = "create"
@ -69,7 +69,7 @@ func newCreate() *cobra.Command {
Create Published OAuth App Integration.
You can retrieve the published oauth app integration via
You can retrieve the published OAuth app integration via
:method:PublishedAppIntegration/get.`
cmd.Annotations = make(map[string]string)
@ -131,10 +131,7 @@ func newDelete() *cobra.Command {
cmd.Long = `Delete Published OAuth App Integration.
Delete an existing Published OAuth App Integration. You can retrieve the
published oauth app integration via :method:PublishedAppIntegration/get.
Arguments:
INTEGRATION_ID: The oauth app integration ID.`
published OAuth app integration via :method:PublishedAppIntegration/get.`
cmd.Annotations = make(map[string]string)
@ -189,10 +186,7 @@ func newGet() *cobra.Command {
cmd.Short = `Get OAuth Published App Integration.`
cmd.Long = `Get OAuth Published App Integration.
Gets the Published OAuth App Integration for the given integration id.
Arguments:
INTEGRATION_ID: The oauth app integration ID.`
Gets the Published OAuth App Integration for the given integration id.`
cmd.Annotations = make(map[string]string)
@ -233,25 +227,39 @@ func newGet() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
*oauth2.ListPublishedAppIntegrationsRequest,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
var listReq oauth2.ListPublishedAppIntegrationsRequest
// TODO: short flags
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, ``)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, ``)
cmd.Use = "list"
cmd.Short = `Get published oauth app integrations.`
cmd.Long = `Get published oauth app integrations.
Get the list of published oauth app integrations for the specified Databricks
Get the list of published OAuth app integrations for the specified Databricks
account`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustAccountClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
a := root.AccountClient(ctx)
response := a.PublishedAppIntegration.List(ctx)
response := a.PublishedAppIntegration.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
}
@ -261,7 +269,7 @@ func newList() *cobra.Command {
// Apply optional overrides to this command.
for _, fn := range listOverrides {
fn(cmd)
fn(cmd, &listReq)
}
return cmd
@ -292,10 +300,7 @@ func newUpdate() *cobra.Command {
cmd.Long = `Updates Published OAuth App Integration.
Updates an existing published OAuth App Integration. You can retrieve the
published oauth app integration via :method:PublishedAppIntegration/get.
Arguments:
INTEGRATION_ID: The oauth app integration ID.`
published OAuth app integration via :method:PublishedAppIntegration/get.`
cmd.Annotations = make(map[string]string)

164 cmd/account/usage-dashboards/usage-dashboards.go generated Executable file
View File

@ -0,0 +1,164 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package usage_dashboards
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/billing"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "usage-dashboards",
Short: `These APIs manage usage dashboards for this account.`,
Long: `These APIs manage usage dashboards for this account. Usage dashboards enable
you to gain insights into your usage with pre-built dashboards: visualize
breakdowns, analyze tag attributions, and identify cost drivers.`,
GroupID: "billing",
Annotations: map[string]string{
"package": "billing",
},
}
// Add methods
cmd.AddCommand(newCreate())
cmd.AddCommand(newGet())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start create command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*billing.CreateBillingUsageDashboardRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq billing.CreateBillingUsageDashboardRequest
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().Var(&createReq.DashboardType, "dashboard-type", `Workspace level usage dashboard shows usage data for the specified workspace ID. Supported values: [USAGE_DASHBOARD_TYPE_GLOBAL, USAGE_DASHBOARD_TYPE_WORKSPACE]`)
cmd.Flags().Int64Var(&createReq.WorkspaceId, "workspace-id", createReq.WorkspaceId, `The workspace ID of the workspace in which the usage dashboard is created.`)
cmd.Use = "create"
cmd.Short = `Create new usage dashboard.`
cmd.Long = `Create new usage dashboard.
Create a usage dashboard specified by workspaceId, accountId, and dashboard
type.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustAccountClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
a := root.AccountClient(ctx)
if cmd.Flags().Changed("json") {
err = createJson.Unmarshal(&createReq)
if err != nil {
return err
}
}
response, err := a.UsageDashboards.Create(ctx, createReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range createOverrides {
fn(cmd, &createReq)
}
return cmd
}
// start get command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getOverrides []func(
*cobra.Command,
*billing.GetBillingUsageDashboardRequest,
)
func newGet() *cobra.Command {
cmd := &cobra.Command{}
var getReq billing.GetBillingUsageDashboardRequest
// TODO: short flags
cmd.Flags().Var(&getReq.DashboardType, "dashboard-type", `Workspace level usage dashboard shows usage data for the specified workspace ID. Supported values: [USAGE_DASHBOARD_TYPE_GLOBAL, USAGE_DASHBOARD_TYPE_WORKSPACE]`)
cmd.Flags().Int64Var(&getReq.WorkspaceId, "workspace-id", getReq.WorkspaceId, `The workspace ID of the workspace in which the usage dashboard is created.`)
cmd.Use = "get"
cmd.Short = `Get usage dashboard.`
cmd.Long = `Get usage dashboard.
Get a usage dashboard specified by workspaceId, accountId, and dashboard type.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustAccountClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
a := root.AccountClient(ctx)
response, err := a.UsageDashboards.Get(ctx, getReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getOverrides {
fn(cmd, &getReq)
}
return cmd
}
// end service UsageDashboards
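A sketch of the SDK call behind the create command above (the enum is passed as its wire string to avoid guessing the generated constant name; the response field is assumed):

package examples

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/billing"
)

// createUsageDashboard creates a workspace-scoped usage dashboard and
// returns its ID, mirroring `databricks account usage-dashboards create`.
func createUsageDashboard(ctx context.Context, a *databricks.AccountClient, workspaceID int64) (string, error) {
	resp, err := a.UsageDashboards.Create(ctx, billing.CreateBillingUsageDashboardRequest{
		DashboardType: billing.UsageDashboardType("USAGE_DASHBOARD_TYPE_WORKSPACE"),
		WorkspaceId:   workspaceID,
	})
	if err != nil {
		return "", err
	}
	return resp.DashboardId, nil // response field assumed
}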

View File

@ -66,7 +66,7 @@ func newDelete() *cobra.Command {
for the specified principal.
Arguments:
WORKSPACE_ID: The workspace ID.
WORKSPACE_ID: The workspace ID for the account.
PRINCIPAL_ID: The ID of the user, service principal, or group.`
cmd.Annotations = make(map[string]string)
@ -247,6 +247,8 @@ func newUpdate() *cobra.Command {
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: array: permissions
cmd.Use = "update WORKSPACE_ID PRINCIPAL_ID"
cmd.Short = `Create or update permissions assignment.`
cmd.Long = `Create or update permissions assignment.
@ -255,7 +257,7 @@ func newUpdate() *cobra.Command {
workspace for the specified principal.
Arguments:
WORKSPACE_ID: The workspace ID.
WORKSPACE_ID: The workspace ID for the account.
PRINCIPAL_ID: The ID of the user, service principal, or group.`
cmd.Annotations = make(map[string]string)
@ -275,8 +277,6 @@ func newUpdate() *cobra.Command {
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
_, err = fmt.Sscan(args[0], &updateReq.WorkspaceId)
if err != nil {
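As elsewhere in this change, update now also works without --json; the positional WORKSPACE_ID is parsed into an int64 with fmt.Sscan as shown above. A hedged sketch of the equivalent SDK call (field and constant names taken from service/iam):

package examples

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/iam"
)

// assignUser grants USER-level workspace access to a principal; as of SDK
// 0.44.0, Update returns the resulting permission assignment (discarded here).
func assignUser(ctx context.Context, a *databricks.AccountClient, workspaceID, principalID int64) error {
	_, err := a.WorkspaceAssignment.Update(ctx, iam.UpdateWorkspaceAssignments{
		WorkspaceId: workspaceID,
		PrincipalId: principalID,
		Permissions: []iam.WorkspacePermission{iam.WorkspacePermissionUser},
	})
	return err
}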

View File

@ -15,6 +15,7 @@ import (
"github.com/databricks/cli/cmd/sync"
"github.com/databricks/cli/cmd/version"
"github.com/databricks/cli/cmd/workspace"
"github.com/databricks/cli/cmd/workspace/apps"
"github.com/spf13/cobra"
)
@ -67,6 +68,7 @@ func New(ctx context.Context) *cobra.Command {
// Add other subcommands.
cli.AddCommand(api.New())
cli.AddCommand(apps.New())
cli.AddCommand(auth.New())
cli.AddCommand(bundle.New())
cli.AddCommand(configure.New())

View File

@ -182,7 +182,7 @@ func TestInstallerWorksForReleases(t *testing.T) {
w.Write(raw)
return
}
if r.URL.Path == "/api/2.0/clusters/get" {
if r.URL.Path == "/api/2.1/clusters/get" {
respondWithJSON(t, w, &compute.ClusterDetails{
State: compute.StateRunning,
})
@ -249,8 +249,9 @@ func TestInstallerWorksForDevelopment(t *testing.T) {
Path: filepath.Dir(t.TempDir()),
})
}()
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/api/2.0/clusters/list" {
if r.URL.Path == "/api/2.1/clusters/list" {
respondWithJSON(t, w, compute.ListClustersResponse{
Clusters: []compute.ClusterDetails{
{
@ -278,7 +279,7 @@ func TestInstallerWorksForDevelopment(t *testing.T) {
})
return
}
if r.URL.Path == "/api/2.0/clusters/spark-versions" {
if r.URL.Path == "/api/2.1/clusters/spark-versions" {
respondWithJSON(t, w, compute.GetSparkVersionsResponse{
Versions: []compute.SparkVersion{
{
@ -289,7 +290,7 @@ func TestInstallerWorksForDevelopment(t *testing.T) {
})
return
}
if r.URL.Path == "/api/2.0/clusters/get" {
if r.URL.Path == "/api/2.1/clusters/get" {
respondWithJSON(t, w, &compute.ClusterDetails{
State: compute.StateRunning,
})
@ -387,7 +388,7 @@ func TestUpgraderWorksForReleases(t *testing.T) {
w.Write(raw)
return
}
if r.URL.Path == "/api/2.0/clusters/get" {
if r.URL.Path == "/api/2.1/clusters/get" {
respondWithJSON(t, w, &compute.ClusterDetails{
State: compute.StateRunning,
})
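The test stubs simply moved from /api/2.0/clusters/* to /api/2.1/clusters/*, since the upgraded SDK calls the 2.1 clusters API. A self-contained sketch of the stubbing pattern these tests use:

package examples

import (
	"encoding/json"
	"net/http"
	"net/http/httptest"

	"github.com/databricks/databricks-sdk-go/service/compute"
)

// fakeClustersServer serves the 2.1 endpoint the 0.44.0 SDK now hits;
// requests to the old 2.0 path fall through to a 404.
func fakeClustersServer() *httptest.Server {
	return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path == "/api/2.1/clusters/get" {
			json.NewEncoder(w).Encode(compute.ClusterDetails{State: compute.StateRunning})
			return
		}
		http.NotFound(w, r)
	}))
}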

View File

@ -111,6 +111,10 @@ func TestAccountClientOrPrompt(t *testing.T) {
expectPrompts(t, accountPromptFn, &config.Config{
Host: "https://accounts.azuredatabricks.net/",
AccountID: "1234",
// Force SDK to not try and lookup the tenant ID from the host.
// The host above is invalid and will not be reachable.
AzureTenantID: "nonempty",
})
})
@ -165,6 +169,10 @@ func TestWorkspaceClientOrPrompt(t *testing.T) {
t.Run("Prompt if no credential provider can be configured", func(t *testing.T) {
expectPrompts(t, workspacePromptFn, &config.Config{
Host: "https://adb-1111.11.azuredatabricks.net/",
// Force SDK to not try and lookup the tenant ID from the host.
// The host above is invalid and will not be reachable.
AzureTenantID: "nonempty",
})
})

388 cmd/workspace/alerts-legacy/alerts-legacy.go generated Executable file
View File

@ -0,0 +1,388 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package alerts_legacy
import (
"fmt"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "alerts-legacy",
Short: `The alerts API can be used to perform CRUD operations on alerts.`,
Long: `The alerts API can be used to perform CRUD operations on alerts. An alert is a
Databricks SQL object that periodically runs a query, evaluates a condition of
its result, and notifies one or more users and/or notification destinations if
the condition was met. Alerts can be scheduled using the sql_task type of
the Jobs API, e.g. :method:jobs/create.
**Note**: A new version of the Databricks SQL API is now available. Please see
the latest version. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`,
GroupID: "sql",
Annotations: map[string]string{
"package": "sql",
},
}
// Add methods
cmd.AddCommand(newCreate())
cmd.AddCommand(newDelete())
cmd.AddCommand(newGet())
cmd.AddCommand(newList())
cmd.AddCommand(newUpdate())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start create command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*sql.CreateAlert,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq sql.CreateAlert
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the object.`)
cmd.Flags().IntVar(&createReq.Rearm, "rearm", createReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`)
cmd.Use = "create"
cmd.Short = `Create an alert.`
cmd.Long = `Create an alert.
Creates an alert. An alert is a Databricks SQL object that periodically runs a
query, evaluates a condition of its result, and notifies users or notification
destinations if the condition was met.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/create instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = createJson.Unmarshal(&createReq)
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
response, err := w.AlertsLegacy.Create(ctx, createReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range createOverrides {
fn(cmd, &createReq)
}
return cmd
}
// start delete command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*sql.DeleteAlertsLegacyRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq sql.DeleteAlertsLegacyRequest
// TODO: short flags
cmd.Use = "delete ALERT_ID"
cmd.Short = `Delete an alert.`
cmd.Long = `Delete an alert.
Deletes an alert. Deleted alerts are no longer accessible and cannot be
restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to
the trash.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/delete instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts Legacy drop-down."
names, err := w.AlertsLegacy.LegacyAlertNameToIdMap(ctx)
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Alerts Legacy drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
deleteReq.AlertId = args[0]
err = w.AlertsLegacy.Delete(ctx, deleteReq)
if err != nil {
return err
}
return nil
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range deleteOverrides {
fn(cmd, &deleteReq)
}
return cmd
}
// start get command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getOverrides []func(
*cobra.Command,
*sql.GetAlertsLegacyRequest,
)
func newGet() *cobra.Command {
cmd := &cobra.Command{}
var getReq sql.GetAlertsLegacyRequest
// TODO: short flags
cmd.Use = "get ALERT_ID"
cmd.Short = `Get an alert.`
cmd.Long = `Get an alert.
Gets an alert.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/get instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts Legacy drop-down."
names, err := w.AlertsLegacy.LegacyAlertNameToIdMap(ctx)
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Alerts Legacy drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
getReq.AlertId = args[0]
response, err := w.AlertsLegacy.Get(ctx, getReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getOverrides {
fn(cmd, &getReq)
}
return cmd
}
// start list command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
cmd.Use = "list"
cmd.Short = `Get alerts.`
cmd.Long = `Get alerts.
Gets a list of alerts.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/list instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response, err := w.AlertsLegacy.List(ctx)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range listOverrides {
fn(cmd)
}
return cmd
}
// start update command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*sql.EditAlert,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq sql.EditAlert
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().IntVar(&updateReq.Rearm, "rearm", updateReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`)
cmd.Use = "update ALERT_ID"
cmd.Short = `Update an alert.`
cmd.Long = `Update an alert.
Updates an alert.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:alerts/update instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = updateJson.Unmarshal(&updateReq)
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
updateReq.AlertId = args[0]
err = w.AlertsLegacy.Update(ctx, updateReq)
if err != nil {
return err
}
return nil
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range updateOverrides {
fn(cmd, &updateReq)
}
return cmd
}
// end service AlertsLegacy
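Since create and update in this legacy service accept input only through --json, the request body maps directly onto sql.CreateAlert. A sketch of driving the same call from Go, using JSON to sidestep spelling out nested option fields (payload keys are illustrative):

package examples

import (
	"context"
	"encoding/json"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/sql"
)

// createLegacyAlert unmarshals a JSON body into the request struct, exactly
// as the generated --json path does, then calls the legacy service.
func createLegacyAlert(ctx context.Context, w *databricks.WorkspaceClient) error {
	var req sql.CreateAlert
	payload := []byte(`{"name":"cpu-high","query_id":"abc123","options":{"column":"cpu","op":">","value":"90"}}`)
	if err := json.Unmarshal(payload, &req); err != nil {
		return err
	}
	_, err := w.AlertsLegacy.Create(ctx, req)
	return err
}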

View File

@ -24,12 +24,7 @@ func New() *cobra.Command {
Databricks SQL object that periodically runs a query, evaluates a condition of
its result, and notifies one or more users and/or notification destinations if
the condition was met. Alerts can be scheduled using the sql_task type of
the Jobs API, e.g. :method:jobs/create.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`,
the Jobs API, e.g. :method:jobs/create.`,
GroupID: "sql",
Annotations: map[string]string{
"package": "sql",
@ -57,36 +52,33 @@ func New() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*sql.CreateAlert,
*sql.CreateAlertRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq sql.CreateAlert
var createReq sql.CreateAlertRequest
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the object.`)
cmd.Flags().IntVar(&createReq.Rearm, "rearm", createReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`)
// TODO: complex arg: alert
cmd.Use = "create"
cmd.Short = `Create an alert.`
cmd.Long = `Create an alert.
Creates an alert. An alert is a Databricks SQL object that periodically runs a
query, evaluates a condition of its result, and notifies users or notification
destinations if the condition was met.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Creates an alert.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
@ -97,8 +89,6 @@ func newCreate() *cobra.Command {
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
response, err := w.Alerts.Create(ctx, createReq)
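The new create takes the whole alert definition as a nested complex argument (the `// TODO: complex arg: alert` above) rather than top-level flags. A hedged sketch with the nested struct name assumed; a real alert also needs a condition, omitted here for brevity:

package examples

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/sql"
)

// createAlert shows the nested request shape of the new alerts API.
func createAlert(ctx context.Context, w *databricks.WorkspaceClient) error {
	_, err := w.Alerts.Create(ctx, sql.CreateAlertRequest{
		Alert: &sql.CreateAlertRequestAlert{ // struct name assumed
			DisplayName: "cpu-high",
			QueryId:     "abc123",
		},
	})
	return err
}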
@ -126,28 +116,23 @@ func newCreate() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*sql.DeleteAlertRequest,
*sql.TrashAlertRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq sql.DeleteAlertRequest
var deleteReq sql.TrashAlertRequest
// TODO: short flags
cmd.Use = "delete ALERT_ID"
cmd.Use = "delete ID"
cmd.Short = `Delete an alert.`
cmd.Long = `Delete an alert.
Deletes an alert. Deleted alerts are no longer accessible and cannot be
restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to
the trash.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Moves an alert to the trash. Trashed alerts immediately disappear from
searches and list views, and can no longer trigger. You can restore a trashed
alert through the UI. A trashed alert is permanently deleted after 30 days.`
cmd.Annotations = make(map[string]string)
@ -158,8 +143,8 @@ func newDelete() *cobra.Command {
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts drop-down."
names, err := w.Alerts.AlertNameToIdMap(ctx)
promptSpinner <- "No ID argument specified. Loading names for Alerts drop-down."
names, err := w.Alerts.ListAlertsResponseAlertDisplayNameToIdMap(ctx, sql.ListAlertsRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Alerts drop-down. Please manually specify required arguments. Original error: %w", err)
@ -173,7 +158,7 @@ func newDelete() *cobra.Command {
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
deleteReq.AlertId = args[0]
deleteReq.Id = args[0]
err = w.Alerts.Delete(ctx, deleteReq)
if err != nil {
@ -210,16 +195,11 @@ func newGet() *cobra.Command {
// TODO: short flags
cmd.Use = "get ALERT_ID"
cmd.Use = "get ID"
cmd.Short = `Get an alert.`
cmd.Long = `Get an alert.
Gets an alert.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Gets an alert.`
cmd.Annotations = make(map[string]string)
@ -230,8 +210,8 @@ func newGet() *cobra.Command {
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts drop-down."
names, err := w.Alerts.AlertNameToIdMap(ctx)
promptSpinner <- "No ID argument specified. Loading names for Alerts drop-down."
names, err := w.Alerts.ListAlertsResponseAlertDisplayNameToIdMap(ctx, sql.ListAlertsRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Alerts drop-down. Please manually specify required arguments. Original error: %w", err)
@ -245,7 +225,7 @@ func newGet() *cobra.Command {
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
getReq.AlertId = args[0]
getReq.Id = args[0]
response, err := w.Alerts.Get(ctx, getReq)
if err != nil {
@ -272,33 +252,41 @@ func newGet() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
*sql.ListAlertsRequest,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
var listReq sql.ListAlertsRequest
// TODO: short flags
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, ``)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, ``)
cmd.Use = "list"
cmd.Short = `Get alerts.`
cmd.Long = `Get alerts.
cmd.Short = `List alerts.`
cmd.Long = `List alerts.
Gets a list of alerts.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Gets a list of alerts accessible to the user, ordered by creation time.
**Warning:** Calling this API concurrently 10 or more times could result in
throttling, service degradation, or a temporary ban.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response, err := w.Alerts.List(ctx)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
response := w.Alerts.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
}
// Disable completions since they are not applicable.
@ -307,7 +295,7 @@ func newList() *cobra.Command {
// Apply optional overrides to this command.
for _, fn := range listOverrides {
fn(cmd)
fn(cmd, &listReq)
}
return cmd
@ -319,35 +307,44 @@ func newList() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*sql.EditAlert,
*sql.UpdateAlertRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq sql.EditAlert
var updateReq sql.UpdateAlertRequest
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().IntVar(&updateReq.Rearm, "rearm", updateReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`)
// TODO: complex arg: alert
cmd.Use = "update ALERT_ID"
cmd.Use = "update ID UPDATE_MASK"
cmd.Short = `Update an alert.`
cmd.Long = `Update an alert.
Updates an alert.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Arguments:
ID:
UPDATE_MASK: Field mask is required to be passed into the PATCH request. Field mask
specifies which fields of the setting payload will be updated. The field
mask needs to be supplied as single string. To specify multiple fields in
the field mask, use comma as the separator (no space).`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
if cmd.Flags().Changed("json") {
err := root.ExactArgs(1)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, provide only ID as positional arguments. Provide 'update_mask' in your JSON input")
}
return nil
}
check := root.ExactArgs(2)
return check(cmd, args)
}
@ -361,16 +358,17 @@ func newUpdate() *cobra.Command {
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
updateReq.AlertId = args[0]
updateReq.Id = args[0]
if !cmd.Flags().Changed("json") {
updateReq.UpdateMask = args[1]
}
err = w.Alerts.Update(ctx, updateReq)
response, err := w.Alerts.Update(ctx, updateReq)
if err != nil {
return err
}
return nil
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
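Update is now a PATCH: UPDATE_MASK names the fields being written, comma-separated with no spaces, and the call returns the updated alert instead of nothing. A sketch (nested struct name assumed):

package examples

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/sql"
)

// renameAlert updates only display_name; fields not named in UpdateMask are
// left untouched.
func renameAlert(ctx context.Context, w *databricks.WorkspaceClient, id string) error {
	_, err := w.Alerts.Update(ctx, sql.UpdateAlertRequest{
		Id:         id,
		UpdateMask: "display_name",
		Alert: &sql.UpdateAlertRequestAlert{ // struct name assumed
			DisplayName: "cpu-high (renamed)",
		},
	})
	return err
}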

View File

@ -9,7 +9,7 @@ import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/serving"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/spf13/cobra"
)
@ -24,9 +24,9 @@ func New() *cobra.Command {
Long: `Apps run directly on a customers Databricks instance, integrate with their
data, use and extend Databricks services, and enable users to interact through
single sign-on.`,
GroupID: "serving",
GroupID: "apps",
Annotations: map[string]string{
"package": "serving",
"package": "apps",
},
// This service is being previewed; hide from help output.
@ -39,12 +39,15 @@ func New() *cobra.Command {
cmd.AddCommand(newDeploy())
cmd.AddCommand(newGet())
cmd.AddCommand(newGetDeployment())
cmd.AddCommand(newGetEnvironment())
cmd.AddCommand(newGetPermissionLevels())
cmd.AddCommand(newGetPermissions())
cmd.AddCommand(newList())
cmd.AddCommand(newListDeployments())
cmd.AddCommand(newSetPermissions())
cmd.AddCommand(newStart())
cmd.AddCommand(newStop())
cmd.AddCommand(newUpdate())
cmd.AddCommand(newUpdatePermissions())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
@ -60,13 +63,13 @@ func New() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*serving.CreateAppRequest,
*apps.CreateAppRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq serving.CreateAppRequest
var createReq apps.CreateAppRequest
var createJson flags.JsonFlag
var createSkipWait bool
@ -126,7 +129,7 @@ func newCreate() *cobra.Command {
return cmdio.Render(ctx, wait.Response)
}
spinner := cmdio.Spinner(ctx)
info, err := wait.OnProgress(func(i *serving.App) {
info, err := wait.OnProgress(func(i *apps.App) {
if i.Status == nil {
return
}
@ -162,13 +165,13 @@ func newCreate() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*serving.DeleteAppRequest,
*apps.DeleteAppRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq serving.DeleteAppRequest
var deleteReq apps.DeleteAppRequest
// TODO: short flags
@ -220,13 +223,13 @@ func newDelete() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var deployOverrides []func(
*cobra.Command,
*serving.CreateAppDeploymentRequest,
*apps.CreateAppDeploymentRequest,
)
func newDeploy() *cobra.Command {
cmd := &cobra.Command{}
var deployReq serving.CreateAppDeploymentRequest
var deployReq apps.CreateAppDeploymentRequest
var deployJson flags.JsonFlag
var deploySkipWait bool
@ -237,7 +240,9 @@ func newDeploy() *cobra.Command {
// TODO: short flags
cmd.Flags().Var(&deployJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "deploy APP_NAME SOURCE_CODE_PATH MODE"
cmd.Flags().Var(&deployReq.Mode, "mode", `The mode of which the deployment will manage the source code. Supported values: [AUTO_SYNC, SNAPSHOT]`)
cmd.Use = "deploy APP_NAME SOURCE_CODE_PATH"
cmd.Short = `Create an app deployment.`
cmd.Long = `Create an app deployment.
@ -251,8 +256,7 @@ func newDeploy() *cobra.Command {
deployed app. The former refers to the original source code location of
the app in the workspace during deployment creation, whereas the latter
provides a system generated stable snapshotted source code path used by
the deployment.
MODE: The mode of which the deployment will manage the source code.`
the deployment.`
cmd.Annotations = make(map[string]string)
@ -260,11 +264,11 @@ func newDeploy() *cobra.Command {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(1)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, provide only APP_NAME as positional arguments. Provide 'source_code_path', 'mode' in your JSON input")
return fmt.Errorf("when --json flag is specified, provide only APP_NAME as positional arguments. Provide 'source_code_path' in your JSON input")
}
return nil
}
check := root.ExactArgs(3)
check := root.ExactArgs(2)
return check(cmd, args)
}
@ -283,12 +287,6 @@ func newDeploy() *cobra.Command {
if !cmd.Flags().Changed("json") {
deployReq.SourceCodePath = args[1]
}
if !cmd.Flags().Changed("json") {
_, err = fmt.Sscan(args[2], &deployReq.Mode)
if err != nil {
return fmt.Errorf("invalid MODE: %s", args[2])
}
}
wait, err := w.Apps.Deploy(ctx, deployReq)
if err != nil {
@ -298,7 +296,7 @@ func newDeploy() *cobra.Command {
return cmdio.Render(ctx, wait.Response)
}
spinner := cmdio.Spinner(ctx)
info, err := wait.OnProgress(func(i *serving.AppDeployment) {
info, err := wait.OnProgress(func(i *apps.AppDeployment) {
if i.Status == nil {
return
}
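MODE dropped out of the positional arguments and became an optional --mode flag. A sketch of the equivalent SDK call, waiting for the deployment to finish (the enum is passed as its wire string rather than a guessed constant name):

package examples

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/apps"
)

// deploySnapshot starts a deployment and blocks on the wait handle the SDK
// returns until the deployment reaches a terminal state.
func deploySnapshot(ctx context.Context, w *databricks.WorkspaceClient, app, path string) (*apps.AppDeployment, error) {
	wait, err := w.Apps.Deploy(ctx, apps.CreateAppDeploymentRequest{
		AppName:        app,
		SourceCodePath: path,
		Mode:           apps.AppDeploymentMode("SNAPSHOT"),
	})
	if err != nil {
		return nil, err
	}
	return wait.Get()
}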
@ -334,13 +332,13 @@ func newDeploy() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var getOverrides []func(
*cobra.Command,
*serving.GetAppRequest,
*apps.GetAppRequest,
)
func newGet() *cobra.Command {
cmd := &cobra.Command{}
var getReq serving.GetAppRequest
var getReq apps.GetAppRequest
// TODO: short flags
@ -392,13 +390,13 @@ func newGet() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var getDeploymentOverrides []func(
*cobra.Command,
*serving.GetAppDeploymentRequest,
*apps.GetAppDeploymentRequest,
)
func newGetDeployment() *cobra.Command {
cmd := &cobra.Command{}
var getDeploymentReq serving.GetAppDeploymentRequest
var getDeploymentReq apps.GetAppDeploymentRequest
// TODO: short flags
@ -447,30 +445,30 @@ func newGetDeployment() *cobra.Command {
return cmd
}
// start get-environment command
// start get-permission-levels command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getEnvironmentOverrides []func(
var getPermissionLevelsOverrides []func(
*cobra.Command,
*serving.GetAppEnvironmentRequest,
*apps.GetAppPermissionLevelsRequest,
)
func newGetEnvironment() *cobra.Command {
func newGetPermissionLevels() *cobra.Command {
cmd := &cobra.Command{}
var getEnvironmentReq serving.GetAppEnvironmentRequest
var getPermissionLevelsReq apps.GetAppPermissionLevelsRequest
// TODO: short flags
cmd.Use = "get-environment NAME"
cmd.Short = `Get app environment.`
cmd.Long = `Get app environment.
cmd.Use = "get-permission-levels APP_NAME"
cmd.Short = `Get app permission levels.`
cmd.Long = `Get app permission levels.
Retrieves app environment.
Gets the permission levels that a user can have on an object.
Arguments:
NAME: The name of the app.`
APP_NAME: The app for which to get or manage permissions.`
cmd.Annotations = make(map[string]string)
@ -484,9 +482,9 @@ func newGetEnvironment() *cobra.Command {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
getEnvironmentReq.Name = args[0]
getPermissionLevelsReq.AppName = args[0]
response, err := w.Apps.GetEnvironment(ctx, getEnvironmentReq)
response, err := w.Apps.GetPermissionLevels(ctx, getPermissionLevelsReq)
if err != nil {
return err
}
@ -498,8 +496,67 @@ func newGetEnvironment() *cobra.Command {
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getEnvironmentOverrides {
fn(cmd, &getEnvironmentReq)
for _, fn := range getPermissionLevelsOverrides {
fn(cmd, &getPermissionLevelsReq)
}
return cmd
}
// start get-permissions command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getPermissionsOverrides []func(
*cobra.Command,
*apps.GetAppPermissionsRequest,
)
func newGetPermissions() *cobra.Command {
cmd := &cobra.Command{}
var getPermissionsReq apps.GetAppPermissionsRequest
// TODO: short flags
cmd.Use = "get-permissions APP_NAME"
cmd.Short = `Get app permissions.`
cmd.Long = `Get app permissions.
Gets the permissions of an app. Apps can inherit permissions from their root
object.
Arguments:
APP_NAME: The app for which to get or manage permissions.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
getPermissionsReq.AppName = args[0]
response, err := w.Apps.GetPermissions(ctx, getPermissionsReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getPermissionsOverrides {
fn(cmd, &getPermissionsReq)
}
return cmd
@ -511,13 +568,13 @@ func newGetEnvironment() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
*serving.ListAppsRequest,
*apps.ListAppsRequest,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
var listReq serving.ListAppsRequest
var listReq apps.ListAppsRequest
// TODO: short flags
@ -564,13 +621,13 @@ func newList() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var listDeploymentsOverrides []func(
*cobra.Command,
*serving.ListAppDeploymentsRequest,
*apps.ListAppDeploymentsRequest,
)
func newListDeployments() *cobra.Command {
cmd := &cobra.Command{}
var listDeploymentsReq serving.ListAppDeploymentsRequest
var listDeploymentsReq apps.ListAppDeploymentsRequest
// TODO: short flags
@ -616,20 +673,94 @@ func newListDeployments() *cobra.Command {
return cmd
}
// start set-permissions command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var setPermissionsOverrides []func(
*cobra.Command,
*apps.AppPermissionsRequest,
)
func newSetPermissions() *cobra.Command {
cmd := &cobra.Command{}
var setPermissionsReq apps.AppPermissionsRequest
var setPermissionsJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: array: access_control_list
cmd.Use = "set-permissions APP_NAME"
cmd.Short = `Set app permissions.`
cmd.Long = `Set app permissions.
Sets permissions on an app. Apps can inherit permissions from their root
object.
Arguments:
APP_NAME: The app for which to get or manage permissions.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = setPermissionsJson.Unmarshal(&setPermissionsReq)
if err != nil {
return err
}
}
setPermissionsReq.AppName = args[0]
response, err := w.Apps.SetPermissions(ctx, setPermissionsReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range setPermissionsOverrides {
fn(cmd, &setPermissionsReq)
}
return cmd
}
// start start command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var startOverrides []func(
*cobra.Command,
*serving.StartAppRequest,
*apps.StartAppRequest,
)
func newStart() *cobra.Command {
cmd := &cobra.Command{}
var startReq serving.StartAppRequest
var startReq apps.StartAppRequest
var startSkipWait bool
var startTimeout time.Duration
cmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach SUCCEEDED state`)
cmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach SUCCEEDED state`)
// TODO: short flags
cmd.Use = "start NAME"
@ -655,11 +786,30 @@ func newStart() *cobra.Command {
startReq.Name = args[0]
response, err := w.Apps.Start(ctx, startReq)
wait, err := w.Apps.Start(ctx, startReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
if startSkipWait {
return cmdio.Render(ctx, wait.Response)
}
spinner := cmdio.Spinner(ctx)
info, err := wait.OnProgress(func(i *apps.AppDeployment) {
if i.Status == nil {
return
}
status := i.Status.State
statusMessage := fmt.Sprintf("current status: %s", status)
if i.Status != nil {
statusMessage = i.Status.Message
}
spinner <- statusMessage
}).GetWithTimeout(startTimeout)
close(spinner)
if err != nil {
return err
}
return cmdio.Render(ctx, info)
}
// Disable completions since they are not applicable.
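start is now a long-running operation: the command polls through the wait handle and streams deployment status into the spinner instead of returning the initial response. The same pattern outside cobra, grounded in the generated code above:

package examples

import (
	"context"
	"fmt"
	"time"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/apps"
)

// startAndWait starts the app, reports progress, and blocks up to 20 minutes
// for the SUCCEEDED state, mirroring the generated --no-wait/--timeout flags.
func startAndWait(ctx context.Context, w *databricks.WorkspaceClient, name string) (*apps.AppDeployment, error) {
	wait, err := w.Apps.Start(ctx, apps.StartAppRequest{Name: name})
	if err != nil {
		return nil, err
	}
	return wait.OnProgress(func(d *apps.AppDeployment) {
		if d.Status != nil {
			fmt.Printf("current status: %s\n", d.Status.State)
		}
	}).GetWithTimeout(20 * time.Minute)
}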
@ -680,13 +830,13 @@ func newStart() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var stopOverrides []func(
*cobra.Command,
*serving.StopAppRequest,
*apps.StopAppRequest,
)
func newStop() *cobra.Command {
cmd := &cobra.Command{}
var stopReq serving.StopAppRequest
var stopReq apps.StopAppRequest
// TODO: short flags
@ -738,13 +888,13 @@ func newStop() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*serving.UpdateAppRequest,
*apps.UpdateAppRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq serving.UpdateAppRequest
var updateReq apps.UpdateAppRequest
var updateJson flags.JsonFlag
// TODO: short flags
@ -801,4 +951,73 @@ func newUpdate() *cobra.Command {
return cmd
}
// start update-permissions command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var updatePermissionsOverrides []func(
*cobra.Command,
*apps.AppPermissionsRequest,
)
func newUpdatePermissions() *cobra.Command {
cmd := &cobra.Command{}
var updatePermissionsReq apps.AppPermissionsRequest
var updatePermissionsJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: array: access_control_list
cmd.Use = "update-permissions APP_NAME"
cmd.Short = `Update app permissions.`
cmd.Long = `Update app permissions.
Updates the permissions on an app. Apps can inherit permissions from their
root object.
Arguments:
APP_NAME: The app for which to get or manage permissions.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = updatePermissionsJson.Unmarshal(&updatePermissionsReq)
if err != nil {
return err
}
}
updatePermissionsReq.AppName = args[0]
response, err := w.Apps.UpdatePermissions(ctx, updatePermissionsReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range updatePermissionsOverrides {
fn(cmd, &updatePermissionsReq)
}
return cmd
}
// end service Apps
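For the new permissions surface, a hedged sketch granting a user access to an app (ACL struct and permission-level value assumed from the generated flags above):

package examples

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/apps"
)

// grantAppUse replaces the app's ACL with a single CAN_USE entry; use
// update-permissions/UpdatePermissions to amend rather than replace.
func grantAppUse(ctx context.Context, w *databricks.WorkspaceClient, appName, userName string) error {
	_, err := w.Apps.SetPermissions(ctx, apps.AppPermissionsRequest{
		AppName: appName,
		AccessControlList: []apps.AppAccessControlRequest{{
			UserName:        userName,
			PermissionLevel: apps.AppPermissionLevel("CAN_USE"),
		}},
	})
	return err
}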

View File

@ -90,30 +90,20 @@ func newCreate() *cobra.Command {
cmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `Additional human-readable description of the cluster policy.`)
// TODO: array: libraries
cmd.Flags().Int64Var(&createReq.MaxClustersPerUser, "max-clusters-per-user", createReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`)
cmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Cluster Policy name requested by the user.`)
cmd.Flags().StringVar(&createReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", createReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in [Databricks Policy Definition Language](https://docs.databricks.com/administration-guide/clusters/policy-definition.html).`)
cmd.Flags().StringVar(&createReq.PolicyFamilyId, "policy-family-id", createReq.PolicyFamilyId, `ID of the policy family.`)
cmd.Use = "create NAME"
cmd.Use = "create"
cmd.Short = `Create a new policy.`
cmd.Long = `Create a new policy.
Creates a new policy with prescribed settings.
Arguments:
NAME: Cluster Policy name requested by the user. This has to be unique. Length
must be between 1 and 100 characters.`
Creates a new policy with prescribed settings.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(0)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'name' in your JSON input")
}
return nil
}
check := root.ExactArgs(1)
check := root.ExactArgs(0)
return check(cmd, args)
}
@ -128,9 +118,6 @@ func newCreate() *cobra.Command {
return err
}
}
if !cmd.Flags().Changed("json") {
createReq.Name = args[0]
}
response, err := w.ClusterPolicies.Create(ctx, createReq)
if err != nil {
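NAME is no longer a positional argument for create; the name now travels in the request body via --name or JSON. The equivalent SDK call, sketched:

package examples

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/compute"
)

// createPolicy creates a named policy from a definition document and returns
// the new policy ID.
func createPolicy(ctx context.Context, w *databricks.WorkspaceClient) (string, error) {
	resp, err := w.ClusterPolicies.Create(ctx, compute.CreatePolicy{
		Name:       "team-standard",
		Definition: `{"spark_version":{"type":"fixed","value":"14.3.x-scala2.12"}}`,
	})
	if err != nil {
		return "", err
	}
	return resp.PolicyId, nil
}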
@ -264,10 +251,11 @@ func newEdit() *cobra.Command {
cmd.Flags().StringVar(&editReq.Description, "description", editReq.Description, `Additional human-readable description of the cluster policy.`)
// TODO: array: libraries
cmd.Flags().Int64Var(&editReq.MaxClustersPerUser, "max-clusters-per-user", editReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`)
cmd.Flags().StringVar(&editReq.Name, "name", editReq.Name, `Cluster Policy name requested by the user.`)
cmd.Flags().StringVar(&editReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", editReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in [Databricks Policy Definition Language](https://docs.databricks.com/administration-guide/clusters/policy-definition.html).`)
cmd.Flags().StringVar(&editReq.PolicyFamilyId, "policy-family-id", editReq.PolicyFamilyId, `ID of the policy family.`)
cmd.Use = "edit POLICY_ID NAME"
cmd.Use = "edit POLICY_ID"
cmd.Short = `Update a cluster policy.`
cmd.Long = `Update a cluster policy.
@ -275,9 +263,7 @@ func newEdit() *cobra.Command {
governed by the previous policy invalid.
Arguments:
POLICY_ID: The ID of the policy to update.
NAME: Cluster Policy name requested by the user. This has to be unique. Length
must be between 1 and 100 characters.`
POLICY_ID: The ID of the policy to update.`
cmd.Annotations = make(map[string]string)
@ -285,12 +271,11 @@ func newEdit() *cobra.Command {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(0)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'policy_id', 'name' in your JSON input")
return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'policy_id' in your JSON input")
}
return nil
}
check := root.ExactArgs(2)
return check(cmd, args)
return nil
}
cmd.PreRunE = root.MustWorkspaceClient
@ -303,13 +288,26 @@ func newEdit() *cobra.Command {
if err != nil {
return err
}
}
if !cmd.Flags().Changed("json") {
} else {
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No POLICY_ID argument specified. Loading names for Cluster Policies drop-down."
names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "The ID of the policy to update")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have the id of the policy to update")
}
editReq.PolicyId = args[0]
}
if !cmd.Flags().Changed("json") {
editReq.Name = args[1]
}
err = w.ClusterPolicies.Edit(ctx, editReq)
if err != nil {
@ -353,7 +351,7 @@ func newGet() *cobra.Command {
Get a cluster policy entity. Creation and editing is available to admins only.
Arguments:
POLICY_ID: Canonical unique identifier for the cluster policy.`
POLICY_ID: Canonical unique identifier for the Cluster Policy.`
cmd.Annotations = make(map[string]string)
@ -370,7 +368,7 @@ func newGet() *cobra.Command {
if err != nil {
return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "Canonical unique identifier for the cluster policy")
id, err := cmdio.Select(ctx, names, "Canonical unique identifier for the Cluster Policy")
if err != nil {
return err
}

View File

@ -43,11 +43,10 @@ func New() *cobra.Command {
manually terminate and restart an all-purpose cluster. Multiple users can
share such clusters to do collaborative interactive analysis.
IMPORTANT: Databricks retains cluster configuration information for up to 200
all-purpose clusters terminated in the last 30 days and up to 30 job clusters
recently terminated by the job scheduler. To keep an all-purpose cluster
configuration even after it has been terminated for more than 30 days, an
administrator can pin a cluster to the cluster list.`,
IMPORTANT: Databricks retains cluster configuration information for terminated
clusters for 30 days. To keep an all-purpose cluster configuration even after
it has been terminated for more than 30 days, an administrator can pin a
cluster to the cluster list.`,
GroupID: "compute",
Annotations: map[string]string{
"package": "compute",
@ -74,6 +73,7 @@ func New() *cobra.Command {
cmd.AddCommand(newSparkVersions())
cmd.AddCommand(newStart())
cmd.AddCommand(newUnpin())
cmd.AddCommand(newUpdate())
cmd.AddCommand(newUpdatePermissions())
// Apply optional overrides to this command.
@ -885,21 +885,18 @@ func newList() *cobra.Command {
// TODO: short flags
cmd.Flags().StringVar(&listReq.CanUseClient, "can-use-client", listReq.CanUseClient, `Filter clusters based on what type of client it can be used for.`)
// TODO: complex arg: filter_by
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Use this field to specify the maximum number of results to be returned by the server.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of clusters respectively.`)
// TODO: complex arg: sort_by
cmd.Use = "list"
cmd.Short = `List all clusters.`
cmd.Long = `List all clusters.
cmd.Short = `List clusters.`
cmd.Long = `List clusters.
Return information about all pinned clusters, active clusters, up to 200 of
the most recently terminated all-purpose clusters in the past 30 days, and up
to 30 of the most recently terminated job clusters in the past 30 days.
For example, if there is 1 pinned cluster, 4 active clusters, 45 terminated
all-purpose clusters in the past 30 days, and 50 terminated job clusters in
the past 30 days, then this API returns the 1 pinned cluster, 4 active
clusters, all 45 terminated all-purpose clusters, and the 30 most recently
terminated job clusters.`
Return information about all pinned and active clusters, and all clusters
terminated within the last 30 days. Clusters terminated prior to this period
are not included.`
cmd.Annotations = make(map[string]string)
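The rewritten description and the new page-size/page-token flags reflect the SDK's paginated listing. A minimal sketch of consuming the same iterator directly in Go, assuming an already-configured WorkspaceClient and the SDK's listing-iterator API; the page size is an arbitrary example:

    it := w.Clusters.List(ctx, compute.ListClustersRequest{PageSize: 100})
    for it.HasNext(ctx) {
        c, err := it.Next(ctx) // the iterator follows page_token across pages transparently
        if err != nil {
            return err
        }
        fmt.Println(c.ClusterId, c.State)
    }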
@ -1753,6 +1750,117 @@ func newUnpin() *cobra.Command {
return cmd
}
// start update command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*compute.UpdateCluster,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq compute.UpdateCluster
var updateJson flags.JsonFlag
var updateSkipWait bool
var updateTimeout time.Duration
cmd.Flags().BoolVar(&updateSkipWait, "no-wait", updateSkipWait, `do not wait to reach RUNNING state`)
cmd.Flags().DurationVar(&updateTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`)
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: complex arg: cluster
cmd.Use = "update CLUSTER_ID UPDATE_MASK"
cmd.Short = `Update cluster configuration (partial).`
cmd.Long = `Update cluster configuration (partial).
Updates the configuration of a cluster to match the partial set of attributes
and size. Denote which fields to update using the update_mask field in the
request body. A cluster can be updated if it is in a RUNNING or TERMINATED
state. If a cluster is updated while in a RUNNING state, it will be
restarted so that the new attributes can take effect. If a cluster is updated
while in a TERMINATED state, it will remain TERMINATED. The updated
attributes will take effect the next time the cluster is started using the
clusters/start API. Attempts to update a cluster in any other state will be
rejected with an INVALID_STATE error code. Clusters created by the
Databricks Jobs service cannot be updated.
Arguments:
CLUSTER_ID: ID of the cluster.
UPDATE_MASK: Specifies which fields of the cluster will be updated. This is required in
the POST request. The update mask should be supplied as a single string.
To specify multiple fields, separate them with commas (no spaces). To
delete a field from a cluster configuration, add it to the update_mask
string but omit it from the cluster object.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(0)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'cluster_id', 'update_mask' in your JSON input")
}
return nil
}
check := root.ExactArgs(2)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = updateJson.Unmarshal(&updateReq)
if err != nil {
return err
}
}
if !cmd.Flags().Changed("json") {
updateReq.ClusterId = args[0]
}
if !cmd.Flags().Changed("json") {
updateReq.UpdateMask = args[1]
}
wait, err := w.Clusters.Update(ctx, updateReq)
if err != nil {
return err
}
if updateSkipWait {
return nil
}
spinner := cmdio.Spinner(ctx)
info, err := wait.OnProgress(func(i *compute.ClusterDetails) {
statusMessage := i.StateMessage
spinner <- statusMessage
}).GetWithTimeout(updateTimeout)
close(spinner)
if err != nil {
return err
}
return cmdio.Render(ctx, info)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range updateOverrides {
fn(cmd, &updateReq)
}
return cmd
}
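For reference, a hedged sketch of the partial update this command wraps, assuming the compute.UpdateCluster and UpdateClusterResource shapes from SDK 0.44.0 (the `cluster` complex arg is still a TODO above); the cluster ID and autoscale bounds are invented, and only the field named in UpdateMask is applied:

    wait, err := w.Clusters.Update(ctx, compute.UpdateCluster{
        ClusterId:  "0123-456789-abcdef00", // hypothetical cluster ID
        UpdateMask: "autoscale",            // fields omitted from the mask are left untouched
        Cluster: &compute.UpdateClusterResource{
            Autoscale: &compute.AutoScale{MinWorkers: 1, MaxWorkers: 4},
        },
    })
    if err != nil {
        return err
    }
    info, err := wait.GetWithTimeout(20 * time.Minute) // same wait semantics as the command
    if err != nil {
        return err
    }
    return cmdio.Render(ctx, info)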
// start update-permissions command
// Slice with functions to override default command behavior.

10 cmd/workspace/cmd.go generated
View File

@ -4,6 +4,7 @@ package workspace
import (
alerts "github.com/databricks/cli/cmd/workspace/alerts"
alerts_legacy "github.com/databricks/cli/cmd/workspace/alerts-legacy"
apps "github.com/databricks/cli/cmd/workspace/apps"
artifact_allowlists "github.com/databricks/cli/cmd/workspace/artifact-allowlists"
catalogs "github.com/databricks/cli/cmd/workspace/catalogs"
@ -24,6 +25,7 @@ import (
experiments "github.com/databricks/cli/cmd/workspace/experiments"
external_locations "github.com/databricks/cli/cmd/workspace/external-locations"
functions "github.com/databricks/cli/cmd/workspace/functions"
genie "github.com/databricks/cli/cmd/workspace/genie"
git_credentials "github.com/databricks/cli/cmd/workspace/git-credentials"
global_init_scripts "github.com/databricks/cli/cmd/workspace/global-init-scripts"
grants "github.com/databricks/cli/cmd/workspace/grants"
@ -37,6 +39,7 @@ import (
metastores "github.com/databricks/cli/cmd/workspace/metastores"
model_registry "github.com/databricks/cli/cmd/workspace/model-registry"
model_versions "github.com/databricks/cli/cmd/workspace/model-versions"
notification_destinations "github.com/databricks/cli/cmd/workspace/notification-destinations"
online_tables "github.com/databricks/cli/cmd/workspace/online-tables"
permission_migration "github.com/databricks/cli/cmd/workspace/permission-migration"
permissions "github.com/databricks/cli/cmd/workspace/permissions"
@ -52,8 +55,10 @@ import (
providers "github.com/databricks/cli/cmd/workspace/providers"
quality_monitors "github.com/databricks/cli/cmd/workspace/quality-monitors"
queries "github.com/databricks/cli/cmd/workspace/queries"
queries_legacy "github.com/databricks/cli/cmd/workspace/queries-legacy"
query_history "github.com/databricks/cli/cmd/workspace/query-history"
query_visualizations "github.com/databricks/cli/cmd/workspace/query-visualizations"
query_visualizations_legacy "github.com/databricks/cli/cmd/workspace/query-visualizations-legacy"
recipient_activation "github.com/databricks/cli/cmd/workspace/recipient-activation"
recipients "github.com/databricks/cli/cmd/workspace/recipients"
registered_models "github.com/databricks/cli/cmd/workspace/registered-models"
@ -85,6 +90,7 @@ func All() []*cobra.Command {
var out []*cobra.Command
out = append(out, alerts.New())
out = append(out, alerts_legacy.New())
out = append(out, apps.New())
out = append(out, artifact_allowlists.New())
out = append(out, catalogs.New())
@ -105,6 +111,7 @@ func All() []*cobra.Command {
out = append(out, experiments.New())
out = append(out, external_locations.New())
out = append(out, functions.New())
out = append(out, genie.New())
out = append(out, git_credentials.New())
out = append(out, global_init_scripts.New())
out = append(out, grants.New())
@ -118,6 +125,7 @@ func All() []*cobra.Command {
out = append(out, metastores.New())
out = append(out, model_registry.New())
out = append(out, model_versions.New())
out = append(out, notification_destinations.New())
out = append(out, online_tables.New())
out = append(out, permission_migration.New())
out = append(out, permissions.New())
@ -133,8 +141,10 @@ func All() []*cobra.Command {
out = append(out, providers.New())
out = append(out, quality_monitors.New())
out = append(out, queries.New())
out = append(out, queries_legacy.New())
out = append(out, query_history.New())
out = append(out, query_visualizations.New())
out = append(out, query_visualizations_legacy.New())
out = append(out, recipient_activation.New())
out = append(out, recipients.New())
out = append(out, registered_models.New())

View File

@ -22,9 +22,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -26,9 +26,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -25,9 +25,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods
@ -186,14 +183,12 @@ func newList() *cobra.Command {
// TODO: array: assets
// TODO: array: categories
cmd.Flags().BoolVar(&listReq.IsAscending, "is-ascending", listReq.IsAscending, ``)
cmd.Flags().BoolVar(&listReq.IsFree, "is-free", listReq.IsFree, `Filters each listing based on if it is free.`)
cmd.Flags().BoolVar(&listReq.IsPrivateExchange, "is-private-exchange", listReq.IsPrivateExchange, `Filters each listing based on if it is a private exchange.`)
cmd.Flags().BoolVar(&listReq.IsStaffPick, "is-staff-pick", listReq.IsStaffPick, `Filters each listing based on whether it is a staff pick.`)
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, ``)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, ``)
// TODO: array: provider_ids
cmd.Flags().Var(&listReq.SortBy, "sort-by", `Criteria for sorting the resulting set of listings. Supported values: [SORT_BY_DATE, SORT_BY_RELEVANCE, SORT_BY_TITLE, SORT_BY_UNSPECIFIED]`)
// TODO: array: tags
cmd.Use = "list"
@ -249,13 +244,11 @@ func newSearch() *cobra.Command {
// TODO: array: assets
// TODO: array: categories
cmd.Flags().BoolVar(&searchReq.IsAscending, "is-ascending", searchReq.IsAscending, ``)
cmd.Flags().BoolVar(&searchReq.IsFree, "is-free", searchReq.IsFree, ``)
cmd.Flags().BoolVar(&searchReq.IsPrivateExchange, "is-private-exchange", searchReq.IsPrivateExchange, ``)
cmd.Flags().IntVar(&searchReq.PageSize, "page-size", searchReq.PageSize, ``)
cmd.Flags().StringVar(&searchReq.PageToken, "page-token", searchReq.PageToken, ``)
// TODO: array: provider_ids
cmd.Flags().Var(&searchReq.SortBy, "sort-by", `. Supported values: [SORT_BY_DATE, SORT_BY_RELEVANCE, SORT_BY_TITLE, SORT_BY_UNSPECIFIED]`)
cmd.Use = "search QUERY"
cmd.Short = `Search listings.`

View File

@ -26,9 +26,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -24,9 +24,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -27,10 +27,10 @@ func New() *cobra.Command {
grep to search the response from this API for the name of your SQL warehouse
as it appears in Databricks SQL.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
**Note**: A new version of the Databricks SQL API is now available. [Learn
more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`,
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`,
GroupID: "sql",
Annotations: map[string]string{
"package": "sql",
@ -67,10 +67,10 @@ func newList() *cobra.Command {
fields that appear in this API response are enumerated for clarity. However,
you need only a SQL warehouse's id to create new queries against it.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:warehouses/list instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)

437 cmd/workspace/genie/genie.go generated Executable file
View File

@ -0,0 +1,437 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package genie
import (
"fmt"
"time"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "genie",
Short: `Genie provides a no-code experience for business users, powered by AI/BI.`,
Long: `Genie provides a no-code experience for business users, powered by AI/BI.
Analysts set up spaces that business users can use to ask questions using
natural language. Genie uses data registered to Unity Catalog and requires at
least CAN USE permission on a Pro or Serverless SQL warehouse. Also,
Databricks Assistant must be enabled.`,
GroupID: "dashboards",
Annotations: map[string]string{
"package": "dashboards",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods
cmd.AddCommand(newCreateMessage())
cmd.AddCommand(newExecuteMessageQuery())
cmd.AddCommand(newGetMessage())
cmd.AddCommand(newGetMessageQueryResult())
cmd.AddCommand(newStartConversation())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start create-message command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var createMessageOverrides []func(
*cobra.Command,
*dashboards.GenieCreateConversationMessageRequest,
)
func newCreateMessage() *cobra.Command {
cmd := &cobra.Command{}
var createMessageReq dashboards.GenieCreateConversationMessageRequest
var createMessageJson flags.JsonFlag
var createMessageSkipWait bool
var createMessageTimeout time.Duration
cmd.Flags().BoolVar(&createMessageSkipWait, "no-wait", createMessageSkipWait, `do not wait to reach COMPLETED state`)
cmd.Flags().DurationVar(&createMessageTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach COMPLETED state`)
// TODO: short flags
cmd.Flags().Var(&createMessageJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "create-message SPACE_ID CONVERSATION_ID CONTENT"
cmd.Short = `Create conversation message.`
cmd.Long = `Create conversation message.
Create a new message in a [conversation](:method:genie/startconversation). The AI
response uses all previously created messages in the conversation to respond.
Arguments:
SPACE_ID: The ID associated with the Genie space where the conversation is started.
CONVERSATION_ID: The ID associated with the conversation.
CONTENT: User message content.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(2)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, provide only SPACE_ID, CONVERSATION_ID as positional arguments. Provide 'content' in your JSON input")
}
return nil
}
check := root.ExactArgs(3)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = createMessageJson.Unmarshal(&createMessageReq)
if err != nil {
return err
}
}
createMessageReq.SpaceId = args[0]
createMessageReq.ConversationId = args[1]
if !cmd.Flags().Changed("json") {
createMessageReq.Content = args[2]
}
wait, err := w.Genie.CreateMessage(ctx, createMessageReq)
if err != nil {
return err
}
if createMessageSkipWait {
return cmdio.Render(ctx, wait.Response)
}
spinner := cmdio.Spinner(ctx)
info, err := wait.OnProgress(func(i *dashboards.GenieMessage) {
status := i.Status
statusMessage := fmt.Sprintf("current status: %s", status)
spinner <- statusMessage
}).GetWithTimeout(createMessageTimeout)
close(spinner)
if err != nil {
return err
}
return cmdio.Render(ctx, info)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range createMessageOverrides {
fn(cmd, &createMessageReq)
}
return cmd
}
// start execute-message-query command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var executeMessageQueryOverrides []func(
*cobra.Command,
*dashboards.ExecuteMessageQueryRequest,
)
func newExecuteMessageQuery() *cobra.Command {
cmd := &cobra.Command{}
var executeMessageQueryReq dashboards.ExecuteMessageQueryRequest
// TODO: short flags
cmd.Use = "execute-message-query SPACE_ID CONVERSATION_ID MESSAGE_ID"
cmd.Short = `Execute SQL query in a conversation message.`
cmd.Long = `Execute SQL query in a conversation message.
Execute the SQL query in the message.
Arguments:
SPACE_ID: Genie space ID
CONVERSATION_ID: Conversation ID
MESSAGE_ID: Message ID`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(3)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
executeMessageQueryReq.SpaceId = args[0]
executeMessageQueryReq.ConversationId = args[1]
executeMessageQueryReq.MessageId = args[2]
response, err := w.Genie.ExecuteMessageQuery(ctx, executeMessageQueryReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range executeMessageQueryOverrides {
fn(cmd, &executeMessageQueryReq)
}
return cmd
}
// start get-message command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getMessageOverrides []func(
*cobra.Command,
*dashboards.GenieGetConversationMessageRequest,
)
func newGetMessage() *cobra.Command {
cmd := &cobra.Command{}
var getMessageReq dashboards.GenieGetConversationMessageRequest
// TODO: short flags
cmd.Use = "get-message SPACE_ID CONVERSATION_ID MESSAGE_ID"
cmd.Short = `Get conversation message.`
cmd.Long = `Get conversation message.
Get message from conversation.
Arguments:
SPACE_ID: The ID associated with the Genie space where the target conversation is
located.
CONVERSATION_ID: The ID associated with the target conversation.
MESSAGE_ID: The ID associated with the target message from the identified
conversation.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(3)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
getMessageReq.SpaceId = args[0]
getMessageReq.ConversationId = args[1]
getMessageReq.MessageId = args[2]
response, err := w.Genie.GetMessage(ctx, getMessageReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getMessageOverrides {
fn(cmd, &getMessageReq)
}
return cmd
}
// start get-message-query-result command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getMessageQueryResultOverrides []func(
*cobra.Command,
*dashboards.GenieGetMessageQueryResultRequest,
)
func newGetMessageQueryResult() *cobra.Command {
cmd := &cobra.Command{}
var getMessageQueryResultReq dashboards.GenieGetMessageQueryResultRequest
// TODO: short flags
cmd.Use = "get-message-query-result SPACE_ID CONVERSATION_ID MESSAGE_ID"
cmd.Short = `Get conversation message SQL query result.`
cmd.Long = `Get conversation message SQL query result.
Get the result of the SQL query attached to the message. This is only
available if the message has a query attachment and its status is
EXECUTING_QUERY.
Arguments:
SPACE_ID: Genie space ID
CONVERSATION_ID: Conversation ID
MESSAGE_ID: Message ID`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(3)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
getMessageQueryResultReq.SpaceId = args[0]
getMessageQueryResultReq.ConversationId = args[1]
getMessageQueryResultReq.MessageId = args[2]
response, err := w.Genie.GetMessageQueryResult(ctx, getMessageQueryResultReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getMessageQueryResultOverrides {
fn(cmd, &getMessageQueryResultReq)
}
return cmd
}
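The same request shape this command builds from positional arguments can be issued directly; a short sketch in which all three IDs are placeholders obtained from the earlier Genie calls:

    result, err := w.Genie.GetMessageQueryResult(ctx, dashboards.GenieGetMessageQueryResultRequest{
        SpaceId:        spaceId,        // the space the conversation lives in
        ConversationId: conversationId, // returned by start-conversation
        MessageId:      messageId,      // returned by create-message
    })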
// start start-conversation command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var startConversationOverrides []func(
*cobra.Command,
*dashboards.GenieStartConversationMessageRequest,
)
func newStartConversation() *cobra.Command {
cmd := &cobra.Command{}
var startConversationReq dashboards.GenieStartConversationMessageRequest
var startConversationJson flags.JsonFlag
var startConversationSkipWait bool
var startConversationTimeout time.Duration
cmd.Flags().BoolVar(&startConversationSkipWait, "no-wait", startConversationSkipWait, `do not wait to reach COMPLETED state`)
cmd.Flags().DurationVar(&startConversationTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach COMPLETED state`)
// TODO: short flags
cmd.Flags().Var(&startConversationJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "start-conversation SPACE_ID CONTENT"
cmd.Short = `Start conversation.`
cmd.Long = `Start conversation.
Start a new conversation.
Arguments:
SPACE_ID: The ID associated with the Genie space where you want to start a
conversation.
CONTENT: The text of the message that starts the conversation.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(1)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, provide only SPACE_ID as positional arguments. Provide 'content' in your JSON input")
}
return nil
}
check := root.ExactArgs(2)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = startConversationJson.Unmarshal(&startConversationReq)
if err != nil {
return err
}
}
startConversationReq.SpaceId = args[0]
if !cmd.Flags().Changed("json") {
startConversationReq.Content = args[1]
}
wait, err := w.Genie.StartConversation(ctx, startConversationReq)
if err != nil {
return err
}
if startConversationSkipWait {
return cmdio.Render(ctx, wait.Response)
}
spinner := cmdio.Spinner(ctx)
info, err := wait.OnProgress(func(i *dashboards.GenieMessage) {
status := i.Status
statusMessage := fmt.Sprintf("current status: %s", status)
spinner <- statusMessage
}).GetWithTimeout(startConversationTimeout)
close(spinner)
if err != nil {
return err
}
return cmdio.Render(ctx, info)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range startConversationOverrides {
fn(cmd, &startConversationReq)
}
return cmd
}
// end service Genie
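Putting the Genie commands together, a hedged end-to-end sketch against the same SDK; the space ID and prompts are invented, and GenieMessage is assumed to carry the space and conversation IDs, as the create-message arguments suggest:

    wait, err := w.Genie.StartConversation(ctx, dashboards.GenieStartConversationMessageRequest{
        SpaceId: "01ef0123456789abcdef0123456789ab", // hypothetical space ID
        Content: "Which region had the highest revenue last quarter?",
    })
    if err != nil {
        return err
    }
    first, err := wait.GetWithTimeout(20 * time.Minute) // wait for COMPLETED, as the command does
    if err != nil {
        return err
    }
    // Follow-up messages reuse the conversation started above.
    _, err = w.Genie.CreateMessage(ctx, dashboards.GenieCreateConversationMessageRequest{
        SpaceId:        first.SpaceId,
        ConversationId: first.ConversationId,
        Content:        "Break that down by month.",
    })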

View File

@ -68,5 +68,9 @@ func Groups() []cobra.Group {
ID: "marketplace",
Title: "Marketplace",
},
{
ID: "apps",
Title: "Apps",
},
}
}

View File

@ -817,6 +817,7 @@ func newGetRun() *cobra.Command {
cmd.Flags().BoolVar(&getRunReq.IncludeHistory, "include-history", getRunReq.IncludeHistory, `Whether to include the repair history in the response.`)
cmd.Flags().BoolVar(&getRunReq.IncludeResolvedValues, "include-resolved-values", getRunReq.IncludeResolvedValues, `Whether to include resolved parameter values in the response.`)
cmd.Flags().StringVar(&getRunReq.PageToken, "page-token", getRunReq.PageToken, `To list the next page or the previous page of job tasks, set this field to the value of the next_page_token or prev_page_token returned in the GetJob response.`)
cmd.Use = "get-run RUN_ID"
cmd.Short = `Get a single job run.`

View File

@ -666,7 +666,7 @@ func newList() *cobra.Command {
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `The number of dashboards to return per page.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A page token, received from a previous ListDashboards call.`)
cmd.Flags().BoolVar(&listReq.ShowTrashed, "show-trashed", listReq.ShowTrashed, `The flag to include dashboards located in the trash.`)
cmd.Flags().Var(&listReq.View, "view", `Indicates whether to include all metadata from the dashboard in the response. Supported values: [DASHBOARD_VIEW_BASIC, DASHBOARD_VIEW_FULL]`)
cmd.Flags().Var(&listReq.View, "view", `DASHBOARD_VIEW_BASIC only includes summary metadata from the dashboard. Supported values: [DASHBOARD_VIEW_BASIC]`)
cmd.Use = "list"
cmd.Short = `List dashboards.`

View File

@ -133,6 +133,7 @@ func newGet() *cobra.Command {
// TODO: short flags
cmd.Flags().BoolVar(&getReq.IncludeAliases, "include-aliases", getReq.IncludeAliases, `Whether to include aliases associated with the model version in the response.`)
cmd.Flags().BoolVar(&getReq.IncludeBrowse, "include-browse", getReq.IncludeBrowse, `Whether to include model versions in the response for which the principal can only access selective metadata for.`)
cmd.Use = "get FULL_NAME VERSION"
@ -203,6 +204,8 @@ func newGetByAlias() *cobra.Command {
// TODO: short flags
cmd.Flags().BoolVar(&getByAliasReq.IncludeAliases, "include-aliases", getByAliasReq.IncludeAliases, `Whether to include aliases associated with the model version in the response.`)
cmd.Use = "get-by-alias FULL_NAME ALIAS"
cmd.Short = `Get Model Version By Alias.`
cmd.Long = `Get Model Version By Alias.

View File

@ -0,0 +1,342 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package notification_destinations
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "notification-destinations",
Short: `The notification destinations API lets you programmatically manage a workspace's notification destinations.`,
Long: `The notification destinations API lets you programmatically manage a
workspace's notification destinations. Notification destinations are used to
send notifications for query alerts and jobs to destinations outside of
Databricks. Only workspace admins can create, update, and delete notification
destinations.`,
GroupID: "settings",
Annotations: map[string]string{
"package": "settings",
},
}
// Add methods
cmd.AddCommand(newCreate())
cmd.AddCommand(newDelete())
cmd.AddCommand(newGet())
cmd.AddCommand(newList())
cmd.AddCommand(newUpdate())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start create command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*settings.CreateNotificationDestinationRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq settings.CreateNotificationDestinationRequest
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: complex arg: config
cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `The display name for the notification destination.`)
cmd.Use = "create"
cmd.Short = `Create a notification destination.`
cmd.Long = `Create a notification destination.
Creates a notification destination. Requires workspace admin permissions.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = createJson.Unmarshal(&createReq)
if err != nil {
return err
}
}
response, err := w.NotificationDestinations.Create(ctx, createReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range createOverrides {
fn(cmd, &createReq)
}
return cmd
}
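The config argument is still a TODO above because it is a union-style object. A minimal sketch of creating a Slack destination through the SDK; the Config and SlackConfig field names are assumptions about the settings package, and the webhook URL is fake:

    dest, err := w.NotificationDestinations.Create(ctx, settings.CreateNotificationDestinationRequest{
        DisplayName: "oncall-slack", // hypothetical name
        Config: &settings.Config{
            Slack: &settings.SlackConfig{Url: "https://hooks.slack.com/services/T000/B000/XXXX"}, // fake URL
        },
    })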
// start delete command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*settings.DeleteNotificationDestinationRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq settings.DeleteNotificationDestinationRequest
// TODO: short flags
cmd.Use = "delete ID"
cmd.Short = `Delete a notification destination.`
cmd.Long = `Delete a notification destination.
Deletes a notification destination. Requires workspace admin permissions.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
deleteReq.Id = args[0]
err = w.NotificationDestinations.Delete(ctx, deleteReq)
if err != nil {
return err
}
return nil
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range deleteOverrides {
fn(cmd, &deleteReq)
}
return cmd
}
// start get command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getOverrides []func(
*cobra.Command,
*settings.GetNotificationDestinationRequest,
)
func newGet() *cobra.Command {
cmd := &cobra.Command{}
var getReq settings.GetNotificationDestinationRequest
// TODO: short flags
cmd.Use = "get ID"
cmd.Short = `Get a notification destination.`
cmd.Long = `Get a notification destination.
Gets a notification destination.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
getReq.Id = args[0]
response, err := w.NotificationDestinations.Get(ctx, getReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getOverrides {
fn(cmd, &getReq)
}
return cmd
}
// start list command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
*settings.ListNotificationDestinationsRequest,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
var listReq settings.ListNotificationDestinationsRequest
// TODO: short flags
cmd.Flags().Int64Var(&listReq.PageSize, "page-size", listReq.PageSize, ``)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, ``)
cmd.Use = "list"
cmd.Short = `List notification destinations.`
cmd.Long = `List notification destinations.
Lists notification destinations.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response := w.NotificationDestinations.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range listOverrides {
fn(cmd, &listReq)
}
return cmd
}
// start update command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*settings.UpdateNotificationDestinationRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq settings.UpdateNotificationDestinationRequest
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: complex arg: config
cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `The display name for the notification destination.`)
cmd.Use = "update ID"
cmd.Short = `Update a notification destination.`
cmd.Long = `Update a notification destination.
Updates a notification destination. Requires workspace admin permissions. At
least one field is required in the request body.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = updateJson.Unmarshal(&updateReq)
if err != nil {
return err
}
}
updateReq.Id = args[0]
response, err := w.NotificationDestinations.Update(ctx, updateReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range updateOverrides {
fn(cmd, &updateReq)
}
return cmd
}
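Updates follow the same shape; a sketch renaming a destination by ID, where destID is assumed to come from a prior create or list call:

    dest, err := w.NotificationDestinations.Update(ctx, settings.UpdateNotificationDestinationRequest{
        Id:          destID,                 // from create or list
        DisplayName: "oncall-slack-renamed", // at least one field must be set
    })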
// end service NotificationDestinations

View File

@ -19,9 +19,9 @@ var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "permission-migration",
Short: `This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx.`,
Long: `This spec contains undocumented permission migration APIs used in
https://github.com/databrickslabs/ucx.`,
Short: `APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx.`,
Long: `APIs for migrating acl permissions, used only by the ucx tool:
https://github.com/databrickslabs/ucx`,
GroupID: "iam",
Annotations: map[string]string{
"package": "iam",
@ -48,13 +48,13 @@ func New() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var migratePermissionsOverrides []func(
*cobra.Command,
*iam.PermissionMigrationRequest,
*iam.MigratePermissionsRequest,
)
func newMigratePermissions() *cobra.Command {
cmd := &cobra.Command{}
var migratePermissionsReq iam.PermissionMigrationRequest
var migratePermissionsReq iam.MigratePermissionsRequest
var migratePermissionsJson flags.JsonFlag
// TODO: short flags
@ -65,14 +65,10 @@ func newMigratePermissions() *cobra.Command {
cmd.Use = "migrate-permissions WORKSPACE_ID FROM_WORKSPACE_GROUP_NAME TO_ACCOUNT_GROUP_NAME"
cmd.Short = `Migrate Permissions.`
cmd.Long = `Migrate Permissions.
Migrate a batch of permissions from a workspace local group to an account
group.
Arguments:
WORKSPACE_ID: WorkspaceId of the associated workspace where the permission migration
will occur. Both workspace group and account group must be in this
workspace.
will occur.
FROM_WORKSPACE_GROUP_NAME: The name of the workspace group that permissions will be migrated from.
TO_ACCOUNT_GROUP_NAME: The name of the account group that permissions will be migrated to.`
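A sketch of the renamed request type in use; the workspace ID and group names are placeholders:

    resp, err := w.PermissionMigration.MigratePermissions(ctx, iam.MigratePermissionsRequest{
        WorkspaceId:            1234567890,       // hypothetical workspace ID
        FromWorkspaceGroupName: "data-engineers", // workspace-local group
        ToAccountGroupName:     "data-engineers", // account group receiving the permissions
    })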

View File

@ -21,6 +21,9 @@ func New() *cobra.Command {
Long: `Permissions API are used to create, read, write, edit, update and manage
access for various users on different objects and endpoints.
* **[Apps permissions](:service:apps)** Manage which users can manage or
use apps.
* **[Cluster permissions](:service:clusters)** Manage which users can
manage, restart, or attach to clusters.
@ -59,7 +62,8 @@ func New() *cobra.Command {
create or use tokens.
* **[Workspace object permissions](:service:workspace)** Manage which
users can read, run, edit, or manage directories, files, and notebooks.
users can read, run, edit, or manage alerts, dbsql-dashboards, directories,
files, notebooks and queries.
For the mapping of the required permissions for specific actions or abilities
and other important information, see [Access Control].
@ -112,10 +116,10 @@ func newGet() *cobra.Command {
parent objects or root object.
Arguments:
REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following:
authorization, clusters, cluster-policies, directories, experiments,
files, instance-pools, jobs, notebooks, pipelines, registered-models,
repos, serving-endpoints, or warehouses.
REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
authorization, clusters, cluster-policies, dbsql-dashboards, directories,
experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
registered-models, repos, serving-endpoints, or warehouses.
REQUEST_OBJECT_ID: The id of the request object.`
cmd.Annotations = make(map[string]string)
@ -240,10 +244,10 @@ func newSet() *cobra.Command {
parent objects or root object.
Arguments:
REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following:
authorization, clusters, cluster-policies, directories, experiments,
files, instance-pools, jobs, notebooks, pipelines, registered-models,
repos, serving-endpoints, or warehouses.
REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
authorization, clusters, cluster-policies, dbsql-dashboards, directories,
experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
registered-models, repos, serving-endpoints, or warehouses.
REQUEST_OBJECT_ID: The id of the request object.`
cmd.Annotations = make(map[string]string)
@ -314,10 +318,10 @@ func newUpdate() *cobra.Command {
their parent objects or root object.
Arguments:
REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following:
authorization, clusters, cluster-policies, directories, experiments,
files, instance-pools, jobs, notebooks, pipelines, registered-models,
repos, serving-endpoints, or warehouses.
REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
authorization, clusters, cluster-policies, dbsql-dashboards, directories,
experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
registered-models, repos, serving-endpoints, or warehouses.
REQUEST_OBJECT_ID: The id of the request object.`
cmd.Annotations = make(map[string]string)
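With alerts, dbsql-dashboards, and queries now accepted object types, reading their ACLs goes through the same generic call; a sketch with a made-up alert ID, assuming the iam.GetPermissionRequest type this command builds:

    acl, err := w.Permissions.Get(ctx, iam.GetPermissionRequest{
        RequestObjectType: "alerts",   // one of the newly documented types
        RequestObjectId:   "abcd1234", // hypothetical alert ID
    })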

View File

@ -60,11 +60,17 @@ func newGet() *cobra.Command {
// TODO: short flags
cmd.Flags().Int64Var(&getReq.Version, "version", getReq.Version, `The version number for the family to fetch.`)
cmd.Use = "get POLICY_FAMILY_ID"
cmd.Short = `Get policy family information.`
cmd.Long = `Get policy family information.
Retrieve the information for a policy family based on its identifier.`
Retrieve the information for a policy family based on its identifier and
version.
Arguments:
POLICY_FAMILY_ID: The family ID about which to retrieve information.`
cmd.Annotations = make(map[string]string)
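A sketch of fetching a specific version, assuming the compute.GetPolicyFamilyRequest type this file builds; the family ID is invented:

    fam, err := w.PolicyFamilies.Get(ctx, compute.GetPolicyFamilyRequest{
        PolicyFamilyId: "personal-vm", // hypothetical family ID
        Version:        1,             // omit to fetch the latest version
    })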
@ -115,14 +121,15 @@ func newList() *cobra.Command {
// TODO: short flags
cmd.Flags().Int64Var(&listReq.MaxResults, "max-results", listReq.MaxResults, `The max number of policy families to return.`)
cmd.Flags().Int64Var(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of policy families to return.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`)
cmd.Use = "list"
cmd.Short = `List policy families.`
cmd.Long = `List policy families.
Retrieve a list of policy families. This API is paginated.`
Returns the list of policy definition types available to use at their latest
version. This API is paginated.`
cmd.Annotations = make(map[string]string)

View File

@ -25,9 +25,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -26,9 +26,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -26,9 +26,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -26,9 +26,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -26,9 +26,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -23,9 +23,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -25,9 +25,6 @@ func New() *cobra.Command {
Annotations: map[string]string{
"package": "marketplace",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods

View File

@ -291,6 +291,8 @@ func newList() *cobra.Command {
// TODO: short flags
cmd.Flags().StringVar(&listReq.DataProviderGlobalMetastoreId, "data-provider-global-metastore-id", listReq.DataProviderGlobalMetastoreId, `If not provided, all providers will be returned.`)
cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of providers to return.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "list"
cmd.Short = `List providers.`
@ -345,6 +347,9 @@ func newListShares() *cobra.Command {
// TODO: short flags
cmd.Flags().IntVar(&listSharesReq.MaxResults, "max-results", listSharesReq.MaxResults, `Maximum number of shares to return.`)
cmd.Flags().StringVar(&listSharesReq.PageToken, "page-token", listSharesReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "list-shares NAME"
cmd.Short = `List shares by Provider.`
cmd.Long = `List shares by Provider.

500 cmd/workspace/queries-legacy/queries-legacy.go generated Executable file
View File

@ -0,0 +1,500 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package queries_legacy
import (
"fmt"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "queries-legacy",
Short: `These endpoints are used for CRUD operations on query definitions.`,
Long: `These endpoints are used for CRUD operations on query definitions. Query
definitions include the target SQL warehouse, query text, name, description,
tags, parameters, and visualizations. Queries can be scheduled using the
sql_task type of the Jobs API, e.g. :method:jobs/create.
**Note**: A new version of the Databricks SQL API is now available. Please see
the latest version. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`,
GroupID: "sql",
Annotations: map[string]string{
"package": "sql",
},
}
// Add methods
cmd.AddCommand(newCreate())
cmd.AddCommand(newDelete())
cmd.AddCommand(newGet())
cmd.AddCommand(newList())
cmd.AddCommand(newRestore())
cmd.AddCommand(newUpdate())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start create command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*sql.QueryPostContent,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq sql.QueryPostContent
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "create"
cmd.Short = `Create a new query definition.`
cmd.Long = `Create a new query definition.
Creates a new query definition. Queries created with this endpoint belong to
the authenticated user making the request.
The data_source_id field specifies the ID of the SQL warehouse to run this
query against. You can use the Data Sources API to see a complete list of
available SQL warehouses. Or you can copy the data_source_id from an
existing query.
**Note**: You cannot add a visualization until you create the query.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:queries/create instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = createJson.Unmarshal(&createReq)
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
response, err := w.QueriesLegacy.Create(ctx, createReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range createOverrides {
fn(cmd, &createReq)
}
return cmd
}
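Because this command takes its whole body from --json, the equivalent SDK call makes the required shape explicit; a hedged sketch in which the name, data source ID, and query text are invented:

    q, err := w.QueriesLegacy.Create(ctx, sql.QueryPostContent{
        Name:         "Daily active users",                         // hypothetical
        DataSourceId: "2f47f0f9-0000-0000-0000-000000000000",       // copy from an existing query
        Query:        "SELECT count(DISTINCT user_id) FROM events", // hypothetical
    })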
// start delete command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*sql.DeleteQueriesLegacyRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq sql.DeleteQueriesLegacyRequest
// TODO: short flags
cmd.Use = "delete QUERY_ID"
cmd.Short = `Delete a query.`
cmd.Long = `Delete a query.
Moves a query to the trash. Trashed queries immediately disappear from
searches and list views, and they cannot be used for alerts. The trash is
deleted after 30 days.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:queries/delete instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries Legacy drop-down."
names, err := w.QueriesLegacy.LegacyQueryNameToIdMap(ctx, sql.ListQueriesLegacyRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Queries Legacy drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have the id of the query to delete")
}
deleteReq.QueryId = args[0]
err = w.QueriesLegacy.Delete(ctx, deleteReq)
if err != nil {
return err
}
return nil
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range deleteOverrides {
fn(cmd, &deleteReq)
}
return cmd
}
// start get command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getOverrides []func(
*cobra.Command,
*sql.GetQueriesLegacyRequest,
)
func newGet() *cobra.Command {
cmd := &cobra.Command{}
var getReq sql.GetQueriesLegacyRequest
// TODO: short flags
cmd.Use = "get QUERY_ID"
cmd.Short = `Get a query definition.`
cmd.Long = `Get a query definition.
Retrieve a query object definition along with contextual permissions
information about the currently authenticated user.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:queries/get instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries Legacy drop-down."
names, err := w.QueriesLegacy.LegacyQueryNameToIdMap(ctx, sql.ListQueriesLegacyRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Queries Legacy drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have the id of the query to retrieve")
}
getReq.QueryId = args[0]
response, err := w.QueriesLegacy.Get(ctx, getReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getOverrides {
fn(cmd, &getReq)
}
return cmd
}
// start list command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
*sql.ListQueriesLegacyRequest,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
var listReq sql.ListQueriesLegacyRequest
// TODO: short flags
cmd.Flags().StringVar(&listReq.Order, "order", listReq.Order, `Name of query attribute to order by.`)
cmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`)
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of queries to return per page.`)
cmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`)
cmd.Use = "list"
cmd.Short = `Get a list of queries.`
cmd.Long = `Get a list of queries.
Gets a list of queries. Optionally, this list can be filtered by a search
term.
**Warning**: Calling this API concurrently 10 or more times could result in
throttling, service degradation, or a temporary ban.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:queries/list instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response := w.QueriesLegacy.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range listOverrides {
fn(cmd, &listReq)
}
return cmd
}
// start restore command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var restoreOverrides []func(
*cobra.Command,
*sql.RestoreQueriesLegacyRequest,
)
func newRestore() *cobra.Command {
cmd := &cobra.Command{}
var restoreReq sql.RestoreQueriesLegacyRequest
// TODO: short flags
cmd.Use = "restore QUERY_ID"
cmd.Short = `Restore a query.`
cmd.Long = `Restore a query.
Restore a query that has been moved to the trash. A restored query appears in
list views and searches. You can use restored queries for alerts.
**Note**: A new version of the Databricks SQL API is now available. Please see
the latest version. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries Legacy drop-down."
names, err := w.QueriesLegacy.LegacyQueryNameToIdMap(ctx, sql.ListQueriesLegacyRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Queries Legacy drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have the id of the query to restore")
}
restoreReq.QueryId = args[0]
err = w.QueriesLegacy.Restore(ctx, restoreReq)
if err != nil {
return err
}
return nil
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range restoreOverrides {
fn(cmd, &restoreReq)
}
return cmd
}
// start update command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*sql.QueryEditContent,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq sql.QueryEditContent
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&updateReq.DataSourceId, "data-source-id", updateReq.DataSourceId, `Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID.`)
cmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `General description that conveys additional information about this query such as usage notes.`)
cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The title of this query that appears in list views, widget headings, and on the query page.`)
// TODO: any: options
cmd.Flags().StringVar(&updateReq.Query, "query", updateReq.Query, `The text of the query to be run.`)
cmd.Flags().Var(&updateReq.RunAsRole, "run-as-role", `Sets the **Run as** role for the object. Supported values: [owner, viewer]`)
// TODO: array: tags
cmd.Use = "update QUERY_ID"
cmd.Short = `Change a query definition.`
cmd.Long = `Change a query definition.
Modify this query definition.
**Note**: You cannot undo this operation.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:queries/update instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = updateJson.Unmarshal(&updateReq)
if err != nil {
return err
}
}
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries Legacy drop-down."
names, err := w.QueriesLegacy.LegacyQueryNameToIdMap(ctx, sql.ListQueriesLegacyRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Queries Legacy drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
updateReq.QueryId = args[0]
response, err := w.QueriesLegacy.Update(ctx, updateReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range updateOverrides {
fn(cmd, &updateReq)
}
return cmd
}
// end service QueriesLegacy
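// Hedged sketch of the legacy update call wrapped by the update command above;
// the fields mirror the flags defined there (values are illustrative):
//
//	updated, err := w.QueriesLegacy.Update(ctx, sql.QueryEditContent{
//		QueryId: "12345",
//		Name:    "Renamed query",
//		Query:   "SELECT 1",
//	})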

View File

@ -19,16 +19,11 @@ var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "queries",
Short: `These endpoints are used for CRUD operations on query definitions.`,
Long: `These endpoints are used for CRUD operations on query definitions. Query
definitions include the target SQL warehouse, query text, name, description,
tags, parameters, and visualizations. Queries can be scheduled using the
sql_task type of the Jobs API, e.g. :method:jobs/create.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`,
Short: `The queries API can be used to perform CRUD operations on queries.`,
Long: `The queries API can be used to perform CRUD operations on queries. A query is
a Databricks SQL object that includes the target SQL warehouse, query text,
name, description, tags, and parameters. Queries can be scheduled using the
sql_task type of the Jobs API, e.g. :method:jobs/create.`,
GroupID: "sql",
Annotations: map[string]string{
"package": "sql",
@ -40,7 +35,7 @@ func New() *cobra.Command {
cmd.AddCommand(newDelete())
cmd.AddCommand(newGet())
cmd.AddCommand(newList())
cmd.AddCommand(newRestore())
cmd.AddCommand(newListVisualizations())
cmd.AddCommand(newUpdate())
// Apply optional overrides to this command.
@ -57,39 +52,33 @@ func New() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*sql.QueryPostContent,
*sql.CreateQueryRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq sql.QueryPostContent
var createReq sql.CreateQueryRequest
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: complex arg: query
cmd.Use = "create"
cmd.Short = `Create a new query definition.`
cmd.Long = `Create a new query definition.
cmd.Short = `Create a query.`
cmd.Long = `Create a query.
Creates a new query definition. Queries created with this endpoint belong to
the authenticated user making the request.
The data_source_id field specifies the ID of the SQL warehouse to run this
query against. You can use the Data Sources API to see a complete list of
available SQL warehouses. Or you can copy the data_source_id from an
existing query.
**Note**: You cannot add a visualization until you create the query.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Creates a query.`
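// Hedged sketch: the new create API nests the payload under a "query" object
// (type and field names assumed from the SDK 0.44.0 generated types; values
// are illustrative):
//
//	created, err := w.Queries.Create(ctx, sql.CreateQueryRequest{
//		Query: &sql.CreateQueryRequestQuery{
//			DisplayName: "Example query",
//			WarehouseId: "abc123",
//			QueryText:   "SELECT 42",
//		},
//	})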
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
@ -100,8 +89,6 @@ func newCreate() *cobra.Command {
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
response, err := w.Queries.Create(ctx, createReq)
@ -129,28 +116,24 @@ func newCreate() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*sql.DeleteQueryRequest,
*sql.TrashQueryRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq sql.DeleteQueryRequest
var deleteReq sql.TrashQueryRequest
// TODO: short flags
cmd.Use = "delete QUERY_ID"
cmd.Use = "delete ID"
cmd.Short = `Delete a query.`
cmd.Long = `Delete a query.
Moves a query to the trash. Trashed queries immediately disappear from
searches and list views, and they cannot be used for alerts. The trash is
deleted after 30 days.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
searches and list views, and cannot be used for alerts. You can restore a
trashed query through the UI. A trashed query is permanently deleted after 30
days.`
cmd.Annotations = make(map[string]string)
@ -161,8 +144,8 @@ func newDelete() *cobra.Command {
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down."
names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{})
promptSpinner <- "No ID argument specified. Loading names for Queries drop-down."
names, err := w.Queries.ListQueryObjectsResponseQueryDisplayNameToIdMap(ctx, sql.ListQueriesRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err)
@ -176,7 +159,7 @@ func newDelete() *cobra.Command {
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
deleteReq.QueryId = args[0]
deleteReq.Id = args[0]
err = w.Queries.Delete(ctx, deleteReq)
if err != nil {
@ -213,17 +196,11 @@ func newGet() *cobra.Command {
// TODO: short flags
cmd.Use = "get QUERY_ID"
cmd.Short = `Get a query definition.`
cmd.Long = `Get a query definition.
cmd.Use = "get ID"
cmd.Short = `Get a query.`
cmd.Long = `Get a query.
Retrieve a query object definition along with contextual permissions
information about the currently authenticated user.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Gets a query.`
cmd.Annotations = make(map[string]string)
@ -234,8 +211,8 @@ func newGet() *cobra.Command {
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down."
names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{})
promptSpinner <- "No ID argument specified. Loading names for Queries drop-down."
names, err := w.Queries.ListQueryObjectsResponseQueryDisplayNameToIdMap(ctx, sql.ListQueriesRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err)
@ -249,7 +226,7 @@ func newGet() *cobra.Command {
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
getReq.QueryId = args[0]
getReq.Id = args[0]
response, err := w.Queries.Get(ctx, getReq)
if err != nil {
@ -286,25 +263,16 @@ func newList() *cobra.Command {
// TODO: short flags
cmd.Flags().StringVar(&listReq.Order, "order", listReq.Order, `Name of query attribute to order by.`)
cmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`)
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of queries to return per page.`)
cmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`)
cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, ``)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, ``)
cmd.Use = "list"
cmd.Short = `Get a list of queries.`
cmd.Long = `Get a list of queries.
cmd.Short = `List queries.`
cmd.Long = `List queries.
Gets a list of queries. Optionally, this list can be filtered by a search
term.
**Warning**: Calling this API concurrently 10 or more times could result in
throttling, service degradation, or a temporary ban.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Gets a list of queries accessible to the user, ordered by creation time.
**Warning:** Calling this API concurrently 10 or more times could result in
throttling, service degradation, or a temporary ban.`
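// Hedged sketch: the new list API is token-paginated and the SDK wraps it in
// an iterator, so page-size and page-token are normally handled for you
// (assumed from SDK 0.44.0):
//
//	it := w.Queries.List(ctx, sql.ListQueriesRequest{PageSize: 100})
//	for it.HasNext(ctx) {
//		q, err := it.Next(ctx)
//		if err != nil {
//			return err
//		}
//		_ = q
//	}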
cmd.Annotations = make(map[string]string)
@ -334,33 +302,33 @@ func newList() *cobra.Command {
return cmd
}
// start restore command
// start list-visualizations command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var restoreOverrides []func(
var listVisualizationsOverrides []func(
*cobra.Command,
*sql.RestoreQueryRequest,
*sql.ListVisualizationsForQueryRequest,
)
func newRestore() *cobra.Command {
func newListVisualizations() *cobra.Command {
cmd := &cobra.Command{}
var restoreReq sql.RestoreQueryRequest
var listVisualizationsReq sql.ListVisualizationsForQueryRequest
// TODO: short flags
cmd.Use = "restore QUERY_ID"
cmd.Short = `Restore a query.`
cmd.Long = `Restore a query.
cmd.Flags().IntVar(&listVisualizationsReq.PageSize, "page-size", listVisualizationsReq.PageSize, ``)
cmd.Flags().StringVar(&listVisualizationsReq.PageToken, "page-token", listVisualizationsReq.PageToken, ``)
cmd.Use = "list-visualizations ID"
cmd.Short = `List visualizations on a query.`
cmd.Long = `List visualizations on a query.
Restore a query that has been moved to the trash. A restored query appears in
list views and searches. You can use restored queries for alerts.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Gets a list of visualizations on a query.`
// This command is being previewed; hide from help output.
cmd.Hidden = true
cmd.Annotations = make(map[string]string)
@ -371,8 +339,8 @@ func newRestore() *cobra.Command {
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down."
names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{})
promptSpinner <- "No ID argument specified. Loading names for Queries drop-down."
names, err := w.Queries.ListQueryObjectsResponseQueryDisplayNameToIdMap(ctx, sql.ListQueriesRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err)
@ -386,13 +354,10 @@ func newRestore() *cobra.Command {
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
restoreReq.QueryId = args[0]
listVisualizationsReq.Id = args[0]
err = w.Queries.Restore(ctx, restoreReq)
if err != nil {
return err
}
return nil
response := w.Queries.ListVisualizations(ctx, listVisualizationsReq)
return cmdio.RenderIterator(ctx, response)
}
// Disable completions since they are not applicable.
@ -400,8 +365,8 @@ func newRestore() *cobra.Command {
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range restoreOverrides {
fn(cmd, &restoreReq)
for _, fn := range listVisualizationsOverrides {
fn(cmd, &listVisualizationsReq)
}
return cmd
@ -413,41 +378,47 @@ func newRestore() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*sql.QueryEditContent,
*sql.UpdateQueryRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq sql.QueryEditContent
var updateReq sql.UpdateQueryRequest
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&updateReq.DataSourceId, "data-source-id", updateReq.DataSourceId, `Data source ID maps to the ID of the data source used by the resource and is distinct from the warehouse ID.`)
cmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `General description that conveys additional information about this query such as usage notes.`)
cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The title of this query that appears in list views, widget headings, and on the query page.`)
// TODO: any: options
cmd.Flags().StringVar(&updateReq.Query, "query", updateReq.Query, `The text of the query to be run.`)
cmd.Flags().Var(&updateReq.RunAsRole, "run-as-role", `Sets the **Run as** role for the object. Supported values: [owner, viewer]`)
// TODO: array: tags
// TODO: complex arg: query
cmd.Use = "update QUERY_ID"
cmd.Short = `Change a query definition.`
cmd.Long = `Change a query definition.
cmd.Use = "update ID UPDATE_MASK"
cmd.Short = `Update a query.`
cmd.Long = `Update a query.
Modify this query definition.
**Note**: You cannot undo this operation.
**Note**: A new version of the Databricks SQL API will soon be available.
[Learn more]
[Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`
Updates a query.
Arguments:
ID:
UPDATE_MASK: Field mask is required to be passed into the PATCH request. Field mask
specifies which fields of the setting payload will be updated. The field
mask needs to be supplied as single string. To specify multiple fields in
the field mask, use comma as the separator (no space).`
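// Hedged example of the update-mask convention described above, assuming the
// SDK 0.44.0 request types (IDs and field names are illustrative):
//
//	updated, err := w.Queries.Update(ctx, sql.UpdateQueryRequest{
//		Id:         "e9ea175a-0000-0000-0000-000000000000",
//		UpdateMask: "display_name,query_text",
//		Query: &sql.UpdateQueryRequestQuery{
//			DisplayName: "New name",
//			QueryText:   "SELECT 2",
//		},
//	})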
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(1)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, provide only ID as positional arguments. Provide 'update_mask' in your JSON input")
}
return nil
}
check := root.ExactArgs(2)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
@ -459,24 +430,10 @@ func newUpdate() *cobra.Command {
return err
}
}
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down."
names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{})
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "")
if err != nil {
return err
}
args = append(args, id)
updateReq.Id = args[0]
if !cmd.Flags().Changed("json") {
updateReq.UpdateMask = args[1]
}
if len(args) != 1 {
return fmt.Errorf("expected to have ")
}
updateReq.QueryId = args[0]
response, err := w.Queries.Update(ctx, updateReq)
if err != nil {

View File

@ -15,9 +15,10 @@ var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "query-history",
Short: `Access the history of queries through SQL warehouses.`,
Long: `Access the history of queries through SQL warehouses.`,
Use: "query-history",
Short: `A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless compute, and DLT.`,
Long: `A service responsible for storing and retrieving the list of queries run
against SQL endpoints, serverless compute, and DLT.`,
GroupID: "sql",
Annotations: map[string]string{
"package": "sql",
@ -52,7 +53,6 @@ func newList() *cobra.Command {
// TODO: short flags
// TODO: complex arg: filter_by
cmd.Flags().BoolVar(&listReq.IncludeMetrics, "include-metrics", listReq.IncludeMetrics, `Whether to include metrics about query.`)
cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Limit the number of results returned in one page.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`)
@ -60,9 +60,13 @@ func newList() *cobra.Command {
cmd.Short = `List Queries.`
cmd.Long = `List Queries.
List the history of queries through SQL warehouses.
List the history of queries through SQL warehouses, serverless compute, and
DLT.
You can filter by user ID, warehouse ID, status, and time range.`
You can filter by user ID, warehouse ID, status, and time range. Most recently
started queries are returned first (up to max_results in request). The
pagination token returned in response can be used to list subsequent query
statuses.`
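// Hedged pagination sketch matching the description above; in SDK 0.44.0 this
// endpoint returns a single page plus a next_page_token rather than an
// iterator, so callers loop explicitly:
//
//	resp, err := w.QueryHistory.List(ctx, sql.ListQueryHistoryRequest{MaxResults: 100})
//	for err == nil {
//		// ... consume resp.Res ...
//		if resp.NextPageToken == "" {
//			break
//		}
//		resp, err = w.QueryHistory.List(ctx, sql.ListQueryHistoryRequest{
//			PageToken: resp.NextPageToken,
//		})
//	}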
cmd.Annotations = make(map[string]string)
@ -76,8 +80,11 @@ func newList() *cobra.Command {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response := w.QueryHistory.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
response, err := w.QueryHistory.List(ctx, listReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.

View File

@ -0,0 +1,253 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package query_visualizations_legacy
import (
"fmt"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/sql"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "query-visualizations-legacy",
Short: `This is an evolving API that facilitates the addition and removal of visualizations from existing queries within the Databricks Workspace.`,
Long: `This is an evolving API that facilitates the addition and removal of
visualizations from existing queries within the Databricks Workspace. Data
structures may change over time.
**Note**: A new version of the Databricks SQL API is now available. Please see
the latest version. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`,
GroupID: "sql",
Annotations: map[string]string{
"package": "sql",
},
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods
cmd.AddCommand(newCreate())
cmd.AddCommand(newDelete())
cmd.AddCommand(newUpdate())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start create command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*sql.CreateQueryVisualizationsLegacyRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq sql.CreateQueryVisualizationsLegacyRequest
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "create"
cmd.Short = `Add visualization to a query.`
cmd.Long = `Add visualization to a query.
Creates a visualization in the query.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:queryvisualizations/create instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html`
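// Hedged sketch of the underlying SDK call; the request type is confirmed
// above, but the field names here are assumed from the legacy visualization
// API and the values are illustrative:
//
//	viz, err := w.QueryVisualizationsLegacy.Create(ctx, sql.CreateQueryVisualizationsLegacyRequest{
//		QueryId: "12345",
//		Type:    "CHART",
//		Name:    "Monthly revenue",
//	})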
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = createJson.Unmarshal(&createReq)
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
response, err := w.QueryVisualizationsLegacy.Create(ctx, createReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range createOverrides {
fn(cmd, &createReq)
}
return cmd
}
// start delete command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*sql.DeleteQueryVisualizationsLegacyRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq sql.DeleteQueryVisualizationsLegacyRequest
// TODO: short flags
cmd.Use = "delete ID"
cmd.Short = `Remove visualization.`
cmd.Long = `Remove visualization.
Removes a visualization from the query.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:queryvisualizations/delete instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
Arguments:
ID: Widget ID returned by :method:queryvizualisations/create`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
deleteReq.Id = args[0]
err = w.QueryVisualizationsLegacy.Delete(ctx, deleteReq)
if err != nil {
return err
}
return nil
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range deleteOverrides {
fn(cmd, &deleteReq)
}
return cmd
}
// start update command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*sql.LegacyVisualization,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq sql.LegacyVisualization
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "update ID"
cmd.Short = `Edit existing visualization.`
cmd.Long = `Edit existing visualization.
Updates a visualization in the query.
**Note**: A new version of the Databricks SQL API is now available. Please use
:method:queryvisualizations/update instead. [Learn more]
[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
Arguments:
ID: The UUID for this visualization.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = updateJson.Unmarshal(&updateReq)
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
updateReq.Id = args[0]
response, err := w.QueryVisualizationsLegacy.Update(ctx, updateReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range updateOverrides {
fn(cmd, &updateReq)
}
return cmd
}
// end service QueryVisualizationsLegacy

View File

@ -19,10 +19,10 @@ var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "query-visualizations",
Short: `This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.`,
Short: `This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace.`,
Long: `This is an evolving API that facilitates the addition and removal of
vizualisations from existing queries within the Databricks Workspace. Data
structures may change over time.`,
visualizations from existing queries in the Databricks Workspace. Data
structures can change over time.`,
GroupID: "sql",
Annotations: map[string]string{
"package": "sql",
@ -51,24 +51,33 @@ func New() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
*sql.CreateQueryVisualizationRequest,
*sql.CreateVisualizationRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
var createReq sql.CreateQueryVisualizationRequest
var createReq sql.CreateVisualizationRequest
var createJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: complex arg: visualization
cmd.Use = "create"
cmd.Short = `Add visualization to a query.`
cmd.Long = `Add visualization to a query.`
cmd.Short = `Add a visualization to a query.`
cmd.Long = `Add a visualization to a query.
Adds a visualization to a query.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
@ -79,8 +88,6 @@ func newCreate() *cobra.Command {
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
response, err := w.QueryVisualizations.Create(ctx, createReq)
@ -108,22 +115,21 @@ func newCreate() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*sql.DeleteQueryVisualizationRequest,
*sql.DeleteVisualizationRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq sql.DeleteQueryVisualizationRequest
var deleteReq sql.DeleteVisualizationRequest
// TODO: short flags
cmd.Use = "delete ID"
cmd.Short = `Remove visualization.`
cmd.Long = `Remove visualization.
Arguments:
ID: Widget ID returned by :method:queryvizualisations/create`
cmd.Short = `Remove a visualization.`
cmd.Long = `Remove a visualization.
Removes a visualization.`
cmd.Annotations = make(map[string]string)
@ -164,29 +170,44 @@ func newDelete() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*sql.Visualization,
*sql.UpdateVisualizationRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq sql.Visualization
var updateReq sql.UpdateVisualizationRequest
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "update ID"
cmd.Short = `Edit existing visualization.`
cmd.Long = `Edit existing visualization.
// TODO: complex arg: visualization
cmd.Use = "update ID UPDATE_MASK"
cmd.Short = `Update a visualization.`
cmd.Long = `Update a visualization.
Updates a visualization.
Arguments:
ID: The UUID for this visualization.`
ID:
UPDATE_MASK: Field mask is required to be passed into the PATCH request. Field mask
specifies which fields of the setting payload will be updated. The field
mask needs to be supplied as single string. To specify multiple fields in
the field mask, use comma as the separator (no space).`
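// Hedged example of the update-mask convention for visualizations, assuming
// the SDK 0.44.0 request types (the nested payload type name is assumed;
// values are illustrative):
//
//	updated, err := w.QueryVisualizations.Update(ctx, sql.UpdateVisualizationRequest{
//		Id:         "viz-uuid",
//		UpdateMask: "display_name",
//		Visualization: &sql.UpdateVisualizationRequestVisualization{
//			DisplayName: "Renamed visualization",
//		},
//	})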
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
if cmd.Flags().Changed("json") {
err := root.ExactArgs(1)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, provide only ID as positional arguments. Provide 'update_mask' in your JSON input")
}
return nil
}
check := root.ExactArgs(2)
return check(cmd, args)
}
@ -200,10 +221,11 @@ func newUpdate() *cobra.Command {
if err != nil {
return err
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
updateReq.Id = args[0]
if !cmd.Flags().Changed("json") {
updateReq.UpdateMask = args[1]
}
response, err := w.QueryVisualizations.Update(ctx, updateReq)
if err != nil {

View File

@ -80,6 +80,7 @@ func newCreate() *cobra.Command {
cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the recipient.`)
cmd.Flags().StringVar(&createReq.DataRecipientGlobalMetastoreId, "data-recipient-global-metastore-id", createReq.DataRecipientGlobalMetastoreId, `The global Unity Catalog metastore id provided by the data recipient.`)
cmd.Flags().Int64Var(&createReq.ExpirationTime, "expiration-time", createReq.ExpirationTime, `Expiration timestamp of the token, in epoch milliseconds.`)
// TODO: complex arg: ip_access_list
cmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of the recipient owner.`)
// TODO: complex arg: properties_kvpairs
@ -311,6 +312,8 @@ func newList() *cobra.Command {
// TODO: short flags
cmd.Flags().StringVar(&listReq.DataRecipientGlobalMetastoreId, "data-recipient-global-metastore-id", listReq.DataRecipientGlobalMetastoreId, `If not provided, all recipients will be returned.`)
cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of recipients to return.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "list"
cmd.Short = `List share recipients.`
@ -449,6 +452,9 @@ func newSharePermissions() *cobra.Command {
// TODO: short flags
cmd.Flags().IntVar(&sharePermissionsReq.MaxResults, "max-results", sharePermissionsReq.MaxResults, `Maximum number of permissions to return.`)
cmd.Flags().StringVar(&sharePermissionsReq.PageToken, "page-token", sharePermissionsReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "share-permissions NAME"
cmd.Short = `Get recipient share permissions.`
cmd.Long = `Get recipient share permissions.
@ -523,6 +529,7 @@ func newUpdate() *cobra.Command {
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the recipient.`)
cmd.Flags().Int64Var(&updateReq.ExpirationTime, "expiration-time", updateReq.ExpirationTime, `Expiration timestamp of the token, in epoch milliseconds.`)
// TODO: complex arg: ip_access_list
cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the recipient.`)
cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of the recipient owner.`)

View File

@ -326,6 +326,7 @@ func newGet() *cobra.Command {
// TODO: short flags
cmd.Flags().BoolVar(&getReq.IncludeAliases, "include-aliases", getReq.IncludeAliases, `Whether to include registered model aliases in the response.`)
cmd.Flags().BoolVar(&getReq.IncludeBrowse, "include-browse", getReq.IncludeBrowse, `Whether to include registered models in the response for which the principal can only access selective metadata.`)
cmd.Use = "get FULL_NAME"

View File

@ -147,6 +147,8 @@ func newDelete() *cobra.Command {
// TODO: short flags
cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the schema is not empty.`)
cmd.Use = "delete FULL_NAME"
cmd.Short = `Delete a schema.`
cmd.Long = `Delete a schema.

View File

@ -254,11 +254,19 @@ func newGet() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
*cobra.Command,
*sharing.ListSharesRequest,
)
func newList() *cobra.Command {
cmd := &cobra.Command{}
var listReq sharing.ListSharesRequest
// TODO: short flags
cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of shares to return.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "list"
cmd.Short = `List shares.`
cmd.Long = `List shares.
@ -269,11 +277,17 @@ func newList() *cobra.Command {
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response := w.Shares.List(ctx)
response := w.Shares.List(ctx, listReq)
return cmdio.RenderIterator(ctx, response)
}
@ -283,7 +297,7 @@ func newList() *cobra.Command {
// Apply optional overrides to this command.
for _, fn := range listOverrides {
fn(cmd)
fn(cmd, &listReq)
}
return cmd
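// Hedged sketch: List now takes a request struct and returns an iterator;
// listing.ToSlice (github.com/databricks/databricks-sdk-go/listing) drains it
// (assumed from SDK 0.44.0):
//
//	it := w.Shares.List(ctx, sharing.ListSharesRequest{MaxResults: 50})
//	shares, err := listing.ToSlice(ctx, it)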
@ -305,6 +319,9 @@ func newSharePermissions() *cobra.Command {
// TODO: short flags
cmd.Flags().IntVar(&sharePermissionsReq.MaxResults, "max-results", sharePermissionsReq.MaxResults, `Maximum number of permissions to return.`)
cmd.Flags().StringVar(&sharePermissionsReq.PageToken, "page-token", sharePermissionsReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "share-permissions NAME"
cmd.Short = `Get permissions.`
cmd.Long = `Get permissions.
@ -455,6 +472,8 @@ func newUpdatePermissions() *cobra.Command {
cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`)
// TODO: array: changes
cmd.Flags().IntVar(&updatePermissionsReq.MaxResults, "max-results", updatePermissionsReq.MaxResults, `Maximum number of permissions to return.`)
cmd.Flags().StringVar(&updatePermissionsReq.PageToken, "page-token", updatePermissionsReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "update-permissions NAME"
cmd.Short = `Update permissions.`

View File

@ -177,6 +177,9 @@ func newList() *cobra.Command {
// TODO: short flags
cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of schemas to return.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "list METASTORE_ID"
cmd.Short = `List system schemas.`
cmd.Long = `List system schemas.

View File

@ -3,6 +3,8 @@
package workspace_bindings
import (
"fmt"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
@ -35,7 +37,8 @@ func New() *cobra.Command {
(/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which
introduces the ability to bind a securable in READ_ONLY mode (catalogs only).
Securables that support binding: - catalog`,
Securable types that support binding: - catalog - storage_credential -
external_location`,
GroupID: "catalog",
Annotations: map[string]string{
"package": "catalog",
@ -131,6 +134,9 @@ func newGetBindings() *cobra.Command {
// TODO: short flags
cmd.Flags().IntVar(&getBindingsReq.MaxResults, "max-results", getBindingsReq.MaxResults, `Maximum number of workspace bindings to return.`)
cmd.Flags().StringVar(&getBindingsReq.PageToken, "page-token", getBindingsReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)
cmd.Use = "get-bindings SECURABLE_TYPE SECURABLE_NAME"
cmd.Short = `Get securable workspace bindings.`
cmd.Long = `Get securable workspace bindings.
@ -139,7 +145,7 @@ func newGetBindings() *cobra.Command {
or an owner of the securable.
Arguments:
SECURABLE_TYPE: The type of the securable.
SECURABLE_TYPE: The type of the securable to bind to a workspace.
SECURABLE_NAME: The name of the securable.`
cmd.Annotations = make(map[string]string)
@ -154,14 +160,14 @@ func newGetBindings() *cobra.Command {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
getBindingsReq.SecurableType = args[0]
_, err = fmt.Sscan(args[0], &getBindingsReq.SecurableType)
if err != nil {
return fmt.Errorf("invalid SECURABLE_TYPE: %s", args[0])
}
getBindingsReq.SecurableName = args[1]
response, err := w.WorkspaceBindings.GetBindings(ctx, getBindingsReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
response := w.WorkspaceBindings.GetBindings(ctx, getBindingsReq)
return cmdio.RenderIterator(ctx, response)
}
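// Hedged sketch: SecurableType is now a typed enum, which is why the string
// argument is parsed with fmt.Sscan above; a direct SDK call could use the
// generated constants instead (constant names assumed from SDK 0.44.0):
//
//	it := w.WorkspaceBindings.GetBindings(ctx, catalog.GetBindingsRequest{
//		SecurableType: catalog.GetBindingsSecurableTypeCatalog,
//		SecurableName: "main",
//	})
//	return cmdio.RenderIterator(ctx, it)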
// Disable completions since they are not applicable.
@ -275,7 +281,7 @@ func newUpdateBindings() *cobra.Command {
admin or an owner of the securable.
Arguments:
SECURABLE_TYPE: The type of the securable.
SECURABLE_TYPE: The type of the securable to bind to a workspace.
SECURABLE_NAME: The name of the securable.`
cmd.Annotations = make(map[string]string)
@ -296,7 +302,10 @@ func newUpdateBindings() *cobra.Command {
return err
}
}
updateBindingsReq.SecurableType = args[0]
_, err = fmt.Sscan(args[0], &updateBindingsReq.SecurableType)
if err != nil {
return fmt.Errorf("invalid SECURABLE_TYPE: %s", args[0])
}
updateBindingsReq.SecurableName = args[1]
response, err := w.WorkspaceBindings.UpdateBindings(ctx, updateBindingsReq)

go.mod
View File

@ -5,7 +5,7 @@ go 1.22
require (
github.com/Masterminds/semver/v3 v3.2.1 // MIT
github.com/briandowns/spinner v1.23.1 // Apache 2.0
github.com/databricks/databricks-sdk-go v0.43.2 // Apache 2.0
github.com/databricks/databricks-sdk-go v0.44.0 // Apache 2.0
github.com/fatih/color v1.17.0 // MIT
github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
github.com/google/uuid v1.6.0 // BSD-3-Clause
@ -60,13 +60,13 @@ require (
go.opentelemetry.io/otel v1.24.0 // indirect
go.opentelemetry.io/otel/metric v1.24.0 // indirect
go.opentelemetry.io/otel/trace v1.24.0 // indirect
golang.org/x/crypto v0.23.0 // indirect
golang.org/x/net v0.25.0 // indirect
golang.org/x/crypto v0.24.0 // indirect
golang.org/x/net v0.26.0 // indirect
golang.org/x/sys v0.23.0 // indirect
golang.org/x/time v0.5.0 // indirect
google.golang.org/api v0.182.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect
google.golang.org/grpc v1.64.0 // indirect
google.golang.org/grpc v1.64.1 // indirect
google.golang.org/protobuf v1.34.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
)

go.sum generated
View File

@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
github.com/databricks/databricks-sdk-go v0.43.2 h1:4B+sHAYO5kFqwZNQRmsF70eecqsFX6i/0KfXoDFQT/E=
github.com/databricks/databricks-sdk-go v0.43.2/go.mod h1:nlzeOEgJ1Tmb5HyknBJ3GEorCZKWqEBoHprvPmTSNq8=
github.com/databricks/databricks-sdk-go v0.44.0 h1:9/FZACv4EFQIOYxfwYVKnY7v46xio9FKCw9tpKB2O/s=
github.com/databricks/databricks-sdk-go v0.44.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -176,8 +176,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI=
golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ=
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc=
@ -192,8 +192,8 @@ golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ=
golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
@ -244,8 +244,8 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.64.0 h1:KH3VH9y/MgNQg1dE7b3XfVK0GsPSIzJwdF617gUSbvY=
google.golang.org/grpc v1.64.0/go.mod h1:oxjF8E3FBnjp+/gVFYdWacaLDx9na1aqy9oovLpxQYg=
google.golang.org/grpc v1.64.1 h1:LKtvyfbX3UGVPFcGqJ9ItpVWW6oN/2XqTxfAnwRRXiA=
google.golang.org/grpc v1.64.1/go.mod h1:hiQF4LFZelK2WKaP6W0L92zGHtiQdZxk8CrSdvyjeP0=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=

View File

@ -134,9 +134,7 @@ func loadInteractiveClusters(ctx context.Context, w *databricks.WorkspaceClient,
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "Loading list of clusters to select from"
defer close(promptSpinner)
all, err := w.Clusters.ListAll(ctx, compute.ListClustersRequest{
CanUseClient: "NOTEBOOKS",
})
all, err := w.Clusters.ListAll(ctx, compute.ListClustersRequest{})
if err != nil {
return nil, fmt.Errorf("list clusters: %w", err)
}

View File

@ -70,7 +70,7 @@ func TestFirstCompatibleCluster(t *testing.T) {
cfg, server := qa.HTTPFixtures{
{
Method: "GET",
Resource: "/api/2.0/clusters/list?can_use_client=NOTEBOOKS",
Resource: "/api/2.1/clusters/list?",
Response: compute.ListClustersResponse{
Clusters: []compute.ClusterDetails{
{
@ -100,7 +100,7 @@ func TestFirstCompatibleCluster(t *testing.T) {
},
{
Method: "GET",
Resource: "/api/2.0/clusters/spark-versions",
Resource: "/api/2.1/clusters/spark-versions",
Response: compute.GetSparkVersionsResponse{
Versions: []compute.SparkVersion{
{
@ -125,7 +125,7 @@ func TestNoCompatibleClusters(t *testing.T) {
cfg, server := qa.HTTPFixtures{
{
Method: "GET",
Resource: "/api/2.0/clusters/list?can_use_client=NOTEBOOKS",
Resource: "/api/2.1/clusters/list?",
Response: compute.ListClustersResponse{
Clusters: []compute.ClusterDetails{
{
@ -147,7 +147,7 @@ func TestNoCompatibleClusters(t *testing.T) {
},
{
Method: "GET",
Resource: "/api/2.0/clusters/spark-versions",
Resource: "/api/2.1/clusters/spark-versions",
Response: compute.GetSparkVersionsResponse{
Versions: []compute.SparkVersion{
{