package api

import (
	"fmt"
	"net/http"
	"strings"

	"github.com/databricks/cli/cmd/root"
	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/cli/libs/flags"
	"github.com/databricks/databricks-sdk-go/client"
	"github.com/databricks/databricks-sdk-go/config"
	"github.com/spf13/cobra"
)

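// New returns the `api` command group. It exposes raw Databricks REST API
// calls, with one subcommand per supported HTTP method.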
func New() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "api",
		Short: "Perform Databricks API call",
	}

	cmd.AddCommand(
		makeCommand(http.MethodGet),
		makeCommand(http.MethodHead),
		makeCommand(http.MethodPost),
		makeCommand(http.MethodPut),
		makeCommand(http.MethodPatch),
		makeCommand(http.MethodDelete),
	)

	return cmd
}

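// makeCommand builds the `api <method>` subcommand for a single HTTP method.
// Illustrative usage (assuming the standard `databricks` entrypoint; the JSON
// payload flag is registered outside this excerpt):
//
//	databricks api get /api/2.0/clusters/list
//	databricks api post /api/2.0/clusters/create --json @cluster.json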
func makeCommand(method string) *cobra.Command {
	var payload flags.JsonFlag

	command := &cobra.Command{
		Use:   strings.ToLower(method),
		Args:  root.ExactArgs(1),
		Short: fmt.Sprintf("Perform %s request", method),
		RunE: func(cmd *cobra.Command, args []string) error {
			path := args[0]

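			// Decode the JSON payload into a generic value so that a request
			// of any shape can be forwarded to the API unchanged.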
			var request any
			diags := payload.Unmarshal(&request)
			if diags.HasError() {
				return diags.Error()
			}

			cfg := &config.Config{}

			// command-line flag can specify the profile in use
			profileFlag := cmd.Flag("profile")
			if profileFlag != nil {
				cfg.Profile = profileFlag.Value.String()
			}

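			// client.New resolves the remaining configuration (host,
			// credentials, and so on) from the environment and configuration
			// file; only the profile is set explicitly above.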
			api, err := client.New(cfg)
			if err != nil {
				return err
			}

			var response any
			headers := map[string]string{"Content-Type": "application/json"}
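			// Execute the request: the decoded payload is sent as the body and
			// the JSON response is unmarshaled into `response`. The nil
			// argument passes no additional query parameters.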
			err = api.Do(cmd.Context(), method, path, headers, nil, request, &response)
			if err != nil {
				return err
			}
|
			// Render the decoded response using the CLI's configured output format.
			return cmdio.Render(cmd.Context(), response)
		},
	}
	// The --json flag supplies the request body, either as an inline JSON
	// string or via @path/to/file.json.
	command.Flags().Var(&payload, "json", `either inline JSON string or @path/to/file.json with request body`)
	return command
}
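For completeness, a minimal sketch of how the command group built by New() might be wired up and exercised. The import path github.com/databricks/cli/cmd/api, the "databricks" root command name, and the example endpoint are illustrative assumptions; only api.New() and standard cobra calls come from the code above.

package main

import (
	"os"

	"github.com/databricks/cli/cmd/api" // assumed import path for the package above
	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical root command; the real CLI assembles its root elsewhere.
	rootCmd := &cobra.Command{Use: "databricks"}
	rootCmd.AddCommand(api.New())

	// Equivalent to running: databricks api get /api/2.0/preview/scim/v2/Me
	rootCmd.SetArgs([]string{"api", "get", "/api/2.0/preview/scim/v2/Me"})
	if err := rootCmd.Execute(); err != nil {
		os.Exit(1)
	}
}

A request that carries a body would go through the --json flag registered in makeCommand, passing either an inline JSON string or a @path/to/file.json reference.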