Annotate generated commands with OpenAPI package name (#466)

Co-authored-by: Serge Smertin <259697+nfx@users.noreply.github.com>
This commit is contained in:
Pieter Noordhuis 2023-06-13 17:20:42 +02:00 committed by GitHub
parent d38649088c
commit f219a0da5a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
73 changed files with 219 additions and 0 deletions

View File

@@ -24,6 +24,9 @@ var Cmd = &cobra.Command{
Short: `{{.Summary | without "`"}}`,
Long: `{{.Comment " " 80 | without "`"}}`,
{{- end}}
Annotations: map[string]string{
"package": "{{ .Package.Name }}",
},
}
{{- $serviceName := .KebabName -}}

View File

@@ -18,6 +18,9 @@ var Cmd = &cobra.Command{
Long: `These APIs manage access rules on resources in an account. Currently, only
grant rules are supported. A grant rule specifies a role assigned to a set of
principals. A list of rules attached to a resource is called a rule set.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start get command

View File

@@ -14,6 +14,9 @@ var Cmd = &cobra.Command{
Short: `This API allows you to download billable usage logs for the specified account and date range.`,
Long: `This API allows you to download billable usage logs for the specified account
and date range. This feature works with all account types.`,
Annotations: map[string]string{
"package": "billing",
},
}
// start download command

View File

@@ -17,6 +17,9 @@ var Cmd = &cobra.Command{
Short: `These APIs manage budget configuration including notifications for exceeding a budget for a period.`,
Long: `These APIs manage budget configuration including notifications for exceeding a
budget for a period. They can also retrieve the status of each budget.`,
Annotations: map[string]string{
"package": "billing",
},
}
// start create command

View File

@@ -20,6 +20,9 @@ var Cmd = &cobra.Command{
Databricks can deploy clusters in the appropriate VPC for the new workspace. A
credential configuration encapsulates this role information, and its ID is
used when creating a new workspace.`,
Annotations: map[string]string{
"package": "provisioning",
},
}
// start create command

View File

@@ -22,6 +22,9 @@ var Cmd = &cobra.Command{
**Note:** You can only add/use the OAuth custom application integrations when
OAuth enrollment status is enabled. For more details see
:method:OAuthEnrollment/create`,
Annotations: map[string]string{
"package": "oauth2",
},
}
// start create command

View File

@@ -31,6 +31,9 @@ var Cmd = &cobra.Command{
encryption requires that the workspace is on the E2 version of the platform.
If you have an older workspace, it might not be on the E2 version of the
platform. If you are not sure, contact your Databricks representative.`,
Annotations: map[string]string{
"package": "provisioning",
},
}
// start create command

View File

@@ -22,6 +22,9 @@ var Cmd = &cobra.Command{
in Unity Catalog to groups, instead of to users individually. All Databricks
account identities can be assigned as members of groups, and members inherit
permissions that are assigned to their group.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start create command

View File

@@ -37,6 +37,9 @@ var Cmd = &cobra.Command{
After changes to the account-level IP access lists, it can take a few minutes
for changes to take effect.`,
Annotations: map[string]string{
"package": "settings",
},
}
// start create command

View File

@@ -75,6 +75,9 @@ var Cmd = &cobra.Command{
[Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
[Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
[create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html`,
Annotations: map[string]string{
"package": "billing",
},
}
// start create command

View File

@@ -16,6 +16,9 @@ var Cmd = &cobra.Command{
Use: "metastore-assignments",
Short: `These APIs manage metastore assignments to a workspace.`,
Long: `These APIs manage metastore assignments to a workspace.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -15,6 +15,9 @@ var Cmd = &cobra.Command{
Short: `These APIs manage Unity Catalog metastores for an account.`,
Long: `These APIs manage Unity Catalog metastores for an account. A metastore
contains catalogs that can be associated with workspaces`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -17,6 +17,9 @@ var Cmd = &cobra.Command{
Short: `These APIs manage network configurations for customer-managed VPCs (optional).`,
Long: `These APIs manage network configurations for customer-managed VPCs (optional).
Its ID is used when creating a new workspace if you use customer-managed VPCs.`,
Annotations: map[string]string{
"package": "provisioning",
},
}
// start create command

View File

@@ -18,6 +18,9 @@ var Cmd = &cobra.Command{
**Note:** Your account must be on the E2 version to use these APIs, this is
because OAuth is only supported on the E2 version.`,
Annotations: map[string]string{
"package": "oauth2",
},
}
// start create command

View File

@@ -16,6 +16,9 @@ var Cmd = &cobra.Command{
Use: "private-access",
Short: `These APIs manage private access settings for this account.`,
Long: `These APIs manage private access settings for this account.`,
Annotations: map[string]string{
"package": "provisioning",
},
}
// start create command

View File

@@ -20,6 +20,9 @@ var Cmd = &cobra.Command{
**Note:** You can only add/use the OAuth published application integrations
when OAuth enrollment status is enabled. For more details see
:method:OAuthEnrollment/create`,
Annotations: map[string]string{
"package": "oauth2",
},
}
// start create command

View File

@@ -28,6 +28,9 @@ var Cmd = &cobra.Command{
[Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html
[Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal`,
Annotations: map[string]string{
"package": "oauth2",
},
}
// start create command

View File

@@ -21,6 +21,9 @@ var Cmd = &cobra.Command{
production data run with service principals, interactive users do not need any
write, delete, or modify privileges in production. This eliminates the risk of
a user overwriting production data by accident.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start create command

View File

@@ -14,6 +14,9 @@ var Cmd = &cobra.Command{
Use: "settings",
Short: `TBD.`,
Long: `TBD`,
Annotations: map[string]string{
"package": "settings",
},
}
// start read-personal-compute-setting command

View File

@@ -14,6 +14,9 @@ var Cmd = &cobra.Command{
Use: "storage-credentials",
Short: `These APIs manage storage credentials for a particular metastore.`,
Long: `These APIs manage storage credentials for a particular metastore.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -21,6 +21,9 @@ var Cmd = &cobra.Command{
bucket for storage of non-production DBFS data. A storage configuration
encapsulates this bucket information, and its ID is used when creating a new
workspace.`,
Annotations: map[string]string{
"package": "provisioning",
},
}
// start create command

View File

@@ -26,6 +26,9 @@ var Cmd = &cobra.Command{
provider and that users account will also be removed from Databricks
account. This ensures a consistent offboarding process and prevents
unauthorized users from accessing sensitive data.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start create command

View File

@@ -16,6 +16,9 @@ var Cmd = &cobra.Command{
Use: "vpc-endpoints",
Short: `These APIs manage VPC endpoint configurations for this account.`,
Long: `These APIs manage VPC endpoint configurations for this account.`,
Annotations: map[string]string{
"package": "provisioning",
},
}
// start create command

View File

@@ -17,6 +17,9 @@ var Cmd = &cobra.Command{
Short: `The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.`,
Long: `The Workspace Permission Assignment API allows you to manage workspace
permissions for principals in your account.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start delete command

View File

@@ -24,6 +24,9 @@ var Cmd = &cobra.Command{
These endpoints are available if your account is on the E2 version of the
platform or on a select custom plan that allows multiple workspaces per
account.`,
Annotations: map[string]string{
"package": "provisioning",
},
}
// start create command

View File

@@ -20,6 +20,9 @@ var Cmd = &cobra.Command{
its result, and notifies one or more users and/or notification destinations if
the condition was met. Alerts can be scheduled using the sql_task type of
the Jobs API, e.g. :method:jobs/create.`,
Annotations: map[string]string{
"package": "sql",
},
}
// start create command

View File

@@ -21,6 +21,9 @@ var Cmd = &cobra.Command{
data centrally across all of the workspaces in a Databricks account. Users in
different workspaces can share access to the same data, depending on
privileges granted centrally in Unity Catalog.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -39,6 +39,9 @@ var Cmd = &cobra.Command{
Only admin users can create, edit, and delete policies. Admin users also have
access to all policies.`,
Annotations: map[string]string{
"package": "compute",
},
}
// start create command

View File

@@ -43,6 +43,9 @@ var Cmd = &cobra.Command{
recently terminated by the job scheduler. To keep an all-purpose cluster
configuration even after it has been terminated for more than 30 days, an
administrator can pin a cluster to the cluster list.`,
Annotations: map[string]string{
"package": "compute",
},
}
// start change-owner command

View File

@@ -26,6 +26,9 @@ var Cmd = &cobra.Command{
may create different types of connections with each connection having a unique
set of configuration options to support credential management and other
settings.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -13,6 +13,9 @@ var Cmd = &cobra.Command{
Short: `This API allows retrieving information about currently authenticated user or service principal.`,
Long: `This API allows retrieving information about currently authenticated user or
service principal.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start me command

View File

@@ -21,6 +21,9 @@ var Cmd = &cobra.Command{
since you can get a dashboard definition with a GET request and then POST it
to create a new one. Dashboards can be scheduled using the sql_task type of
the Jobs API, e.g. :method:jobs/create.`,
Annotations: map[string]string{
"package": "sql",
},
}
// start create command

View File

@@ -21,6 +21,9 @@ var Cmd = &cobra.Command{
in your workspace. We advise you to use any text editor, REST client, or
grep to search the response from this API for the name of your SQL warehouse
as it appears in Databricks SQL.`,
Annotations: map[string]string{
"package": "sql",
},
}
// start list command

View File

@@ -24,6 +24,9 @@ var Cmd = &cobra.Command{
Experiments are located in the workspace file tree. You manage experiments
using the same tools you use to manage other workspace objects such as
folders, notebooks, and libraries.`,
Annotations: map[string]string{
"package": "ml",
},
}
// start create-experiment command

View File

@@ -26,6 +26,9 @@ var Cmd = &cobra.Command{
To create external locations, you must be a metastore admin or a user with the
**CREATE_EXTERNAL_LOCATION** privilege.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -21,6 +21,9 @@ var Cmd = &cobra.Command{
invoked wherever a table reference is allowed in a query. In Unity Catalog, a
function resides at the same level as a table, so it can be referenced with
the form __catalog_name__.__schema_name__.__function_name__.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -21,6 +21,9 @@ var Cmd = &cobra.Command{
See [more info].
[more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html`,
Annotations: map[string]string{
"package": "workspace",
},
}
// start create command

View File

@@ -24,6 +24,9 @@ var Cmd = &cobra.Command{
script returns with a bad exit code, the Apache Spark container fails to
launch and init scripts with later position are skipped. If enough containers
fail, the entire cluster fails with a GLOBAL_INIT_SCRIPT_FAILURE error code.`,
Annotations: map[string]string{
"package": "compute",
},
}
// start create command

View File

@@ -26,6 +26,9 @@ var Cmd = &cobra.Command{
automatically grants the privilege to all current and future objects within
the catalog. Similarly, privileges granted on a schema are inherited by all
current and future objects within that schema.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start get command

View File

@@ -22,6 +22,9 @@ var Cmd = &cobra.Command{
in Unity Catalog to groups, instead of to users individually. All Databricks
workspace identities can be assigned as members of groups, and members inherit
permissions that are assigned to their group.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start create command

View File

@@ -33,6 +33,9 @@ var Cmd = &cobra.Command{
Databricks does not charge DBUs while instances are idle in the pool. Instance
provider billing does apply. See pricing.`,
Annotations: map[string]string{
"package": "compute",
},
}
// start create command

View File

@@ -19,6 +19,9 @@ var Cmd = &cobra.Command{
instance profiles for more information.
[Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html`,
Annotations: map[string]string{
"package": "compute",
},
}
// start add command

View File

@@ -36,6 +36,9 @@ var Cmd = &cobra.Command{
After changes to the IP access list feature, it can take a few minutes for
changes to take effect.`,
Annotations: map[string]string{
"package": "settings",
},
}
// start create command

View File

@@ -34,6 +34,9 @@ var Cmd = &cobra.Command{
[Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html
[Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html
[Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets`,
Annotations: map[string]string{
"package": "jobs",
},
}
// start cancel-all-runs command

View File

@@ -35,6 +35,9 @@ var Cmd = &cobra.Command{
When you uninstall a library from a cluster, the library is removed only when
you restart the cluster. Until you restart the cluster, the status of the
uninstalled library appears as Uninstall pending restart.`,
Annotations: map[string]string{
"package": "compute",
},
}
// start all-cluster-statuses command

View File

@@ -28,6 +28,9 @@ var Cmd = &cobra.Command{
workspaces created before Unity Catalog was released. If your workspace
includes a legacy Hive metastore, the data in that metastore is available in a
catalog named hive_metastore.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start assign command

View File

@@ -17,6 +17,9 @@ var Cmd = &cobra.Command{
Short: `MLflow Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.`,
Long: `MLflow Model Registry is a centralized model repository and a UI and set of
APIs that enable you to manage the full lifecycle of MLflow Models.`,
Annotations: map[string]string{
"package": "ml",
},
}
// start approve-transition-request command

View File

@@ -15,6 +15,9 @@ var Cmd = &cobra.Command{
Short: `Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.`,
Long: `Permissions API are used to create read, write, edit, update and manage access
for various users on different objects and endpoints.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start get command

View File

@@ -30,6 +30,9 @@ var Cmd = &cobra.Command{
quality with Delta Live Tables expectations. Expectations allow you to define
expected data quality and specify how to handle records that fail those
expectations.`,
Annotations: map[string]string{
"package": "pipelines",
},
}
// start create command

View File

@@ -22,6 +22,9 @@ var Cmd = &cobra.Command{
Policy families cannot be used directly to create clusters. Instead, you
create cluster policies using a policy family. Cluster policies created using
a policy family inherit the policy family's policy definition.`,
Annotations: map[string]string{
"package": "compute",
},
}
// start get command

View File

@@ -16,6 +16,9 @@ var Cmd = &cobra.Command{
Use: "providers",
Short: `Databricks Providers REST API.`,
Long: `Databricks Providers REST API`,
Annotations: map[string]string{
"package": "sharing",
},
}
// start create command

View File

@@ -19,6 +19,9 @@ var Cmd = &cobra.Command{
definitions include the target SQL warehouse, query text, name, description,
tags, parameters, and visualizations. Queries can be scheduled using the
sql_task type of the Jobs API, e.g. :method:jobs/create.`,
Annotations: map[string]string{
"package": "sql",
},
}
// start create command

View File

@@ -14,6 +14,9 @@ var Cmd = &cobra.Command{
Use: "query-history",
Short: `Access the history of queries through SQL warehouses.`,
Long: `Access the history of queries through SQL warehouses.`,
Annotations: map[string]string{
"package": "sql",
},
}
// start list command

View File

@@ -14,6 +14,9 @@ var Cmd = &cobra.Command{
Use: "recipient-activation",
Short: `Databricks Recipient Activation REST API.`,
Long: `Databricks Recipient Activation REST API`,
Annotations: map[string]string{
"package": "sharing",
},
}
// start get-activation-url-info command

View File

@@ -16,6 +16,9 @@ var Cmd = &cobra.Command{
Use: "recipients",
Short: `Databricks Recipients REST API.`,
Long: `Databricks Recipients REST API`,
Annotations: map[string]string{
"package": "sharing",
},
}
// start create command

View File

@@ -25,6 +25,9 @@ var Cmd = &cobra.Command{
Within Repos you can develop code in notebooks or other files and follow data
science and engineering code development best practices using Git for version
control, collaboration, and CI/CD.`,
Annotations: map[string]string{
"package": "workspace",
},
}
// start create command

View File

@@ -20,6 +20,9 @@ var Cmd = &cobra.Command{
access (or list) a table or view in a schema, users must have the USE_SCHEMA
data permission on the schema and its parent catalog, and they must have the
SELECT permission on the table or view.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -27,6 +27,9 @@ var Cmd = &cobra.Command{
Databricks secrets. While Databricks makes an effort to redact secret values
that might be displayed in notebooks, it is not possible to prevent such users
from reading secrets.`,
Annotations: map[string]string{
"package": "workspace",
},
}
// start create-scope command

View File

@@ -21,6 +21,9 @@ var Cmd = &cobra.Command{
production data run with service principals, interactive users do not need any
write, delete, or modify privileges in production. This eliminates the risk of
a user overwriting production data by accident.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start create command

View File

@@ -29,6 +29,9 @@ var Cmd = &cobra.Command{
define how requests should be routed to your served models behind an endpoint.
Additionally, you can configure the scale of resources that should be applied
to each served model.`,
Annotations: map[string]string{
"package": "serving",
},
}
// start build-logs command

View File

@@ -14,6 +14,9 @@ var Cmd = &cobra.Command{
Use: "shares",
Short: `Databricks Shares REST API.`,
Long: `Databricks Shares REST API`,
Annotations: map[string]string{
"package": "sharing",
},
}
// start create command

View File

@@ -28,6 +28,9 @@ var Cmd = &cobra.Command{
To create storage credentials, you must be a Databricks account admin. The
account admin who creates the storage credential can delegate ownership to
another user or group to manage permissions on it.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -16,6 +16,9 @@ var Cmd = &cobra.Command{
Long: `A system schema is a schema that lives within the system catalog. A system
schema may contain information about customer usage of Unity Catalog such as
audit-logs, billing-logs, lineage information, etc.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start disable command

View File

@@ -28,6 +28,9 @@ var Cmd = &cobra.Command{
You can declare primary keys and foreign keys as part of the table
specification during table creation. You can also add or drop constraints on
existing tables.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -24,6 +24,9 @@ var Cmd = &cobra.Command{
A table can be managed or external. From an API perspective, a __VIEW__ is a
particular kind of table (rather than a managed or external table).`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start delete command

View File

@@ -18,6 +18,9 @@ var Cmd = &cobra.Command{
Long: `Enables administrators to get all tokens and delete tokens for other users.
Admins can either get every token, get a specific token by ID, or get all
tokens for a particular user.`,
Annotations: map[string]string{
"package": "settings",
},
}
// start create-obo-token command

View File

@@ -17,6 +17,9 @@ var Cmd = &cobra.Command{
Short: `The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.`,
Long: `The Token API allows you to create, list, and revoke tokens that can be used
to authenticate and access Databricks REST APIs.`,
Annotations: map[string]string{
"package": "settings",
},
}
// start create command

View File

@@ -26,6 +26,9 @@ var Cmd = &cobra.Command{
provider and that users account will also be removed from Databricks
workspace. This ensures a consistent offboarding process and prevents
unauthorized users from accessing sensitive data.`,
Annotations: map[string]string{
"package": "iam",
},
}
// start create command

View File

@@ -23,6 +23,9 @@ var Cmd = &cobra.Command{
storing library and config files of arbitrary formats such as .whl or .txt
centrally and providing secure access across workspaces to it, or transforming
and querying non-tabular data files in ETL.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start create command

View File

@@ -19,6 +19,9 @@ var Cmd = &cobra.Command{
Long: `A SQL warehouse is a compute resource that lets you run SQL commands on data
objects within Databricks SQL. Compute resources are infrastructure resources
that provide processing capabilities in the cloud.`,
Annotations: map[string]string{
"package": "sql",
},
}
// start create command

View File

@@ -19,6 +19,9 @@ var Cmd = &cobra.Command{
A catalog's workspace bindings can be configured by a metastore admin or the
owner of the catalog.`,
Annotations: map[string]string{
"package": "catalog",
},
}
// start get command

View File

@@ -14,6 +14,9 @@ var Cmd = &cobra.Command{
Use: "workspace-conf",
Short: `This API allows updating known workspace settings for advanced users.`,
Long: `This API allows updating known workspace settings for advanced users.`,
Annotations: map[string]string{
"package": "settings",
},
}
// start get-status command

View File

@@ -20,6 +20,9 @@ var Cmd = &cobra.Command{
A notebook is a web-based interface to a document that contains runnable code,
visualizations, and explanatory text.`,
Annotations: map[string]string{
"package": "workspace",
},
}
// start delete command