Added OpenAPI command coverage (#357)
This PR adds the following command groups:
## Workspace-level command groups
* `bricks alerts` - The alerts API can be used to perform CRUD operations on alerts.
* `bricks catalogs` - A catalog is the first layer of Unity Catalog’s three-level namespace.
* `bricks cluster-policies` - Cluster policy limits the ability to configure clusters based on a set of rules.
* `bricks clusters` - The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.
* `bricks current-user` - This API allows retrieving information about currently authenticated user or service principal.
* `bricks dashboards` - In general, there is little need to modify dashboards using the API.
* `bricks data-sources` - This API is provided to assist you in making new query objects.
* `bricks experiments` - MLflow Experiment tracking.
* `bricks external-locations` - An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.
* `bricks functions` - Functions implement User-Defined Functions (UDFs) in Unity Catalog.
* `bricks git-credentials` - Registers personal access token for Databricks to do operations on behalf of the user.
* `bricks global-init-scripts` - The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.
* `bricks grants` - In Unity Catalog, data is secure by default.
* `bricks groups` - Groups simplify identity management, making it easier to assign access to Databricks Workspace, data, and other securable objects.
* `bricks instance-pools` - Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances, which reduces cluster start and auto-scaling times.
* `bricks instance-profiles` - The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.
* `bricks ip-access-lists` - IP Access List enables admins to configure IP access lists.
* `bricks jobs` - The Jobs API allows you to create, edit, and delete jobs.
* `bricks libraries` - The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.
* `bricks metastores` - A metastore is the top-level container of objects in Unity Catalog.
* `bricks model-registry` - MLflow Model Registry commands.
* `bricks permissions` - Permissions API are used to create, read, write, edit, update and manage access for various users on different objects and endpoints.
* `bricks pipelines` - The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.
* `bricks policy-families` - View available policy families.
* `bricks providers` - Databricks Providers REST API.
* `bricks queries` - These endpoints are used for CRUD operations on query definitions.
* `bricks query-history` - Access the history of queries through SQL warehouses.
* `bricks recipient-activation` - Databricks Recipient Activation REST API.
* `bricks recipients` - Databricks Recipients REST API.
* `bricks repos` - The Repos API allows users to manage their git repos.
* `bricks schemas` - A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.
* `bricks secrets` - The Secrets API allows you to manage secrets, secret scopes, and access permissions.
* `bricks service-principals` - Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
* `bricks serving-endpoints` - The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
* `bricks shares` - Databricks Shares REST API.
* `bricks storage-credentials` - A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.
* `bricks table-constraints` - Primary key and foreign key constraints encode relationships between fields in tables.
* `bricks tables` - A table resides in the third layer of Unity Catalog’s three-level namespace.
* `bricks token-management` - Enables administrators to get all tokens and delete tokens for other users.
* `bricks tokens` - The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.
* `bricks users` - User identities recognized by Databricks and represented by email addresses.
* `bricks volumes` - Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.
* `bricks warehouses` - A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.
* `bricks workspace` - The Workspace API allows you to list, import, export, and delete notebooks and folders.
* `bricks workspace-conf` - This API allows updating known workspace settings for advanced users.
## Account-level command groups
* `bricks account billable-usage` - This API allows you to download billable usage logs for the specified account and date range.
* `bricks account budgets` - These APIs manage budget configuration including notifications for exceeding a budget for a period.
* `bricks account credentials` - These APIs manage credential configurations for this workspace.
* `bricks account custom-app-integration` - These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
* `bricks account encryption-keys` - These APIs manage encryption key configurations for this workspace (optional).
* `bricks account groups` - Groups simplify identity management, making it easier to assign access to Databricks Account, data, and other securable objects.
* `bricks account ip-access-lists` - The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
* `bricks account log-delivery` - These APIs manage log delivery configurations for this account.
* `bricks account metastore-assignments` - These APIs manage metastore assignments to a workspace.
* `bricks account metastores` - These APIs manage Unity Catalog metastores for an account.
* `bricks account networks` - These APIs manage network configurations for customer-managed VPCs (optional).
* `bricks account o-auth-enrollment` - These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using any OAuth published/custom application integration.
* `bricks account private-access` - These APIs manage private access settings for this account.
* `bricks account published-app-integration` - These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
* `bricks account service-principals` - Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
* `bricks account storage` - These APIs manage storage configurations for this workspace.
* `bricks account storage-credentials` - These APIs manage storage credentials for a particular metastore.
* `bricks account users` - User identities recognized by Databricks and represented by email addresses.
* `bricks account vpc-endpoints` - These APIs manage VPC endpoint configurations for this account.
* `bricks account workspace-assignment` - The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
* `bricks account workspaces` - These APIs manage workspaces for this account.
2023-04-26 11:06:16 +00:00
|
|
|
package root
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
|
|
|
"strings"
|
|
|
|
|
2023-05-16 16:35:39 +00:00
|
|
|
"github.com/databricks/cli/bundle"
|
|
|
|
"github.com/databricks/cli/libs/cmdio"
|
Added OpenAPI command coverage (#357)
This PR adds the following command groups:
## Workspace-level command groups
* `bricks alerts` - The alerts API can be used to perform CRUD operations on alerts.
* `bricks catalogs` - A catalog is the first layer of Unity Catalog’s three-level namespace.
* `bricks cluster-policies` - Cluster policy limits the ability to configure clusters based on a set of rules.
* `bricks clusters` - The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.
* `bricks current-user` - This API allows retrieving information about currently authenticated user or service principal.
* `bricks dashboards` - In general, there is little need to modify dashboards using the API.
* `bricks data-sources` - This API is provided to assist you in making new query objects.
* `bricks experiments` - MLflow Experiment tracking.
* `bricks external-locations` - An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.
* `bricks functions` - Functions implement User-Defined Functions (UDFs) in Unity Catalog.
* `bricks git-credentials` - Registers personal access token for Databricks to do operations on behalf of the user.
* `bricks global-init-scripts` - The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.
* `bricks grants` - In Unity Catalog, data is secure by default.
* `bricks groups` - Groups simplify identity management, making it easier to assign access to Databricks Workspace, data, and other securable objects.
* `bricks instance-pools` - Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances, which reduces cluster start and auto-scaling times.
* `bricks instance-profiles` - The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.
* `bricks ip-access-lists` - IP Access List enables admins to configure IP access lists.
* `bricks jobs` - The Jobs API allows you to create, edit, and delete jobs.
* `bricks libraries` - The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.
* `bricks metastores` - A metastore is the top-level container of objects in Unity Catalog.
* `bricks model-registry` - MLflow Model Registry commands.
* `bricks permissions` - Permissions API are used to create, read, write, edit, update and manage access for various users on different objects and endpoints.
* `bricks pipelines` - The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.
* `bricks policy-families` - View available policy families.
* `bricks providers` - Databricks Providers REST API.
* `bricks queries` - These endpoints are used for CRUD operations on query definitions.
* `bricks query-history` - Access the history of queries through SQL warehouses.
* `bricks recipient-activation` - Databricks Recipient Activation REST API.
* `bricks recipients` - Databricks Recipients REST API.
* `bricks repos` - The Repos API allows users to manage their git repos.
* `bricks schemas` - A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.
* `bricks secrets` - The Secrets API allows you to manage secrets, secret scopes, and access permissions.
* `bricks service-principals` - Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
* `bricks serving-endpoints` - The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
* `bricks shares` - Databricks Shares REST API.
* `bricks storage-credentials` - A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.
* `bricks table-constraints` - Primary key and foreign key constraints encode relationships between fields in tables.
* `bricks tables` - A table resides in the third layer of Unity Catalog’s three-level namespace.
* `bricks token-management` - Enables administrators to get all tokens and delete tokens for other users.
* `bricks tokens` - The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.
* `bricks users` - User identities recognized by Databricks and represented by email addresses.
* `bricks volumes` - Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.
* `bricks warehouses` - A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.
* `bricks workspace` - The Workspace API allows you to list, import, export, and delete notebooks and folders.
* `bricks workspace-conf` - This API allows updating known workspace settings for advanced users.
## Account-level command groups
* `bricks account billable-usage` - This API allows you to download billable usage logs for the specified account and date range.
* `bricks account budgets` - These APIs manage budget configuration including notifications for exceeding a budget for a period.
* `bricks account credentials` - These APIs manage credential configurations for this workspace.
* `bricks account custom-app-integration` - These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
* `bricks account encryption-keys` - These APIs manage encryption key configurations for this workspace (optional).
* `bricks account groups` - Groups simplify identity management, making it easier to assign access to Databricks Account, data, and other securable objects.
* `bricks account ip-access-lists` - The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.
* `bricks account log-delivery` - These APIs manage log delivery configurations for this account.
* `bricks account metastore-assignments` - These APIs manage metastore assignments to a workspace.
* `bricks account metastores` - These APIs manage Unity Catalog metastores for an account.
* `bricks account networks` - These APIs manage network configurations for customer-managed VPCs (optional).
* `bricks account o-auth-enrollment` - These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using any OAuth published/custom application integration.
* `bricks account private-access` - These APIs manage private access settings for this account.
* `bricks account published-app-integration` - These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
* `bricks account service-principals` - Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
* `bricks account storage` - These APIs manage storage configurations for this workspace.
* `bricks account storage-credentials` - These APIs manage storage credentials for a particular metastore.
* `bricks account users` - User identities recognized by Databricks and represented by email addresses.
* `bricks account vpc-endpoints` - These APIs manage VPC endpoint configurations for this account.
* `bricks account workspace-assignment` - The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.
* `bricks account workspaces` - These APIs manage workspaces for this account.
2023-04-26 11:06:16 +00:00
|
|
|
"github.com/databricks/databricks-sdk-go"
|
|
|
|
"github.com/databricks/databricks-sdk-go/config"
|
|
|
|
"github.com/databricks/databricks-sdk-go/service/iam"
|
|
|
|
"github.com/manifoldco/promptui"
|
|
|
|
"github.com/spf13/cobra"
|
|
|
|
"gopkg.in/ini.v1"
|
|
|
|
)
|
|
|
|
|
|
|
|
// Placeholders to use as unique keys in context.Context.
// Only their addresses matter; the int values are never read.
var (
	workspaceClient int
	accountClient   int
	currentUser     int
)
|
|
|
|
|
|
|
|
func init() {
|
|
|
|
RootCmd.PersistentFlags().StringP("profile", "p", "", "~/.databrickscfg profile")
|
|
|
|
}
|
|
|
|
|
|
|
|
// MustAccountClient configures an account-level Databricks client and stores
// it on the command's context, for retrieval via AccountClient(ctx). It is
// shaped as a cobra PreRunE hook (cmd, args) -> error.
//
// Profile resolution order:
//  1. the --profile command-line flag, if set;
//  2. if exactly one ~/.databrickscfg profile has an account_id, that profile;
//  3. on interactive terminals, when authentication fails because the profile
//     is not an account profile, the user is asked to pick one and auth is
//     retried.
func MustAccountClient(cmd *cobra.Command, args []string) error {
	cfg := &config.Config{}

	// command-line flag can specify the profile in use
	profileFlag := cmd.Flag("profile")
	if profileFlag != nil {
		cfg.Profile = profileFlag.Value.String()
	}

	if cfg.Profile == "" {
		// account-level CLI was not really done before, so here are the assumptions:
		// 1. only admins will have account configured
		// 2. 99% of admins will have access to just one account
		// hence, we don't need to create a special "DEFAULT_ACCOUNT" profile yet
		profiles, err := loadProfiles()
		if err != nil {
			return err
		}
		var items []profile
		for _, v := range profiles {
			// only profiles carrying an account_id are account-level
			if v.AccountID == "" {
				continue
			}
			items = append(items, v)
		}
		if len(items) == 1 {
			// unambiguous: auto-select the sole account profile
			cfg.Profile = items[0].Name
		}
	}

TRY_AUTH: // or try picking a config profile dynamically
	a, err := databricks.NewAccountClient((*databricks.Config)(cfg))
	if cmdio.IsInteractive(cmd.Context()) && errors.Is(err, databricks.ErrNotAccountClient) {
		// the chosen profile is not an account profile; on an interactive
		// terminal, let the user pick one and retry authentication with it
		profile, err := askForAccountProfile()
		if err != nil {
			return err
		}
		cfg = &config.Config{Profile: profile}
		goto TRY_AUTH
	}
	if err != nil {
		return err
	}

	// stash the client on the context under the accountClient key
	cmd.SetContext(context.WithValue(cmd.Context(), &accountClient, a))
	return nil
}
|
|
|
|
|
|
|
|
// MustWorkspaceClient configures a workspace-level Databricks client, verifies
// it by fetching the current user identity, and stores both on the command's
// context (see WorkspaceClient(ctx) and Me(ctx)). It is shaped as a cobra
// PreRunE hook (cmd, args) -> error.
//
// Configuration sources, in order: the --profile flag, then a bundle found in
// the current directory (if any). On interactive terminals, if authentication
// cannot be configured, the user is asked to pick a workspace profile and the
// whole attempt is retried.
func MustWorkspaceClient(cmd *cobra.Command, args []string) error {
	cfg := &config.Config{}

	// command-line flag takes precedence over environment variable
	profileFlag := cmd.Flag("profile")
	if profileFlag != nil {
		cfg.Profile = profileFlag.Value.String()
	}

	// try configuring a bundle
	err := TryConfigureBundle(cmd, args)
	if err != nil {
		return err
	}

	// and load the config from there
	currentBundle := bundle.GetOrNil(cmd.Context())
	if currentBundle != nil {
		cfg = currentBundle.WorkspaceClient().Config
	}

TRY_AUTH: // or try picking a config profile dynamically
	ctx := cmd.Context()
	w, err := databricks.NewWorkspaceClient((*databricks.Config)(cfg))
	if err != nil {
		return err
	}
	// get current user identity also to verify validity of configuration
	me, err := w.CurrentUser.Me(ctx)
	if cmdio.IsInteractive(ctx) && errors.Is(err, config.ErrCannotConfigureAuth) {
		// auth could not be configured; on an interactive terminal, let the
		// user pick a workspace profile and retry from TRY_AUTH
		profile, err := askForWorkspaceProfile()
		if err != nil {
			return err
		}
		cfg = &config.Config{Profile: profile}
		goto TRY_AUTH
	}
	if err != nil {
		return err
	}
	// stash both the verified identity and the client on the context
	ctx = context.WithValue(ctx, &currentUser, me)
	ctx = context.WithValue(ctx, &workspaceClient, w)
	cmd.SetContext(ctx)
	return nil
}
|
|
|
|
|
|
|
|
// profile is one named section of the ~/.databrickscfg INI file.
type profile struct {
	// Name is the INI section name.
	Name string
	// Host is the value of the section's "host" key.
	Host string
	// AccountID is the value of the "account_id" key; a non-empty value
	// marks this as an account-level (rather than workspace-level) profile.
	AccountID string
}
|
|
|
|
|
|
|
|
func (p profile) Cloud() string {
|
|
|
|
if strings.Contains(p.Host, ".azuredatabricks.net") {
|
|
|
|
return "Azure"
|
|
|
|
}
|
|
|
|
if strings.Contains(p.Host, "gcp.databricks.com") {
|
|
|
|
return "GCP"
|
|
|
|
}
|
|
|
|
return "AWS"
|
|
|
|
}
|
|
|
|
|
|
|
|
func loadProfiles() (profiles []profile, err error) {
|
|
|
|
homedir, err := os.UserHomeDir()
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("cannot find homedir: %w", err)
|
|
|
|
}
|
|
|
|
file := filepath.Join(homedir, ".databrickscfg")
|
|
|
|
iniFile, err := ini.Load(file)
|
|
|
|
if err != nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
for _, v := range iniFile.Sections() {
|
|
|
|
all := v.KeysHash()
|
|
|
|
host, ok := all["host"]
|
|
|
|
if !ok {
|
|
|
|
// invalid profile
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
profiles = append(profiles, profile{
|
|
|
|
Name: v.Name(),
|
|
|
|
Host: host,
|
|
|
|
AccountID: all["account_id"],
|
|
|
|
})
|
|
|
|
}
|
|
|
|
return profiles, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func askForWorkspaceProfile() (string, error) {
|
|
|
|
profiles, err := loadProfiles()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
var items []profile
|
|
|
|
for _, v := range profiles {
|
|
|
|
if v.AccountID != "" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
items = append(items, v)
|
|
|
|
}
|
|
|
|
label := "~/.databrickscfg profile"
|
|
|
|
i, _, err := (&promptui.Select{
|
|
|
|
Label: label,
|
|
|
|
Items: items,
|
|
|
|
Templates: &promptui.SelectTemplates{
|
|
|
|
Active: `{{.Name | bold}} ({{.Host|faint}})`,
|
|
|
|
Inactive: `{{.Name}}`,
|
|
|
|
Selected: fmt.Sprintf(`{{ "%s" | faint }}: {{ .Name | bold }}`, label),
|
|
|
|
},
|
|
|
|
Stdin: os.Stdin,
|
|
|
|
}).Run()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
return items[i].Name, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func askForAccountProfile() (string, error) {
|
|
|
|
profiles, err := loadProfiles()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
var items []profile
|
|
|
|
for _, v := range profiles {
|
|
|
|
if v.AccountID == "" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
items = append(items, v)
|
|
|
|
}
|
|
|
|
if len(items) == 1 {
|
|
|
|
return items[0].Name, nil
|
|
|
|
}
|
|
|
|
label := "~/.databrickscfg profile"
|
|
|
|
i, _, err := (&promptui.Select{
|
|
|
|
Label: label,
|
|
|
|
Items: items,
|
|
|
|
Templates: &promptui.SelectTemplates{
|
|
|
|
Active: `{{.Name | bold}} ({{.AccountID|faint}} {{.Cloud|faint}})`,
|
|
|
|
Inactive: `{{.Name}}`,
|
|
|
|
Selected: fmt.Sprintf(`{{ "%s" | faint }}: {{ .Name | bold }}`, label),
|
|
|
|
},
|
|
|
|
Stdin: os.Stdin,
|
|
|
|
}).Run()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
return items[i].Name, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func WorkspaceClient(ctx context.Context) *databricks.WorkspaceClient {
|
|
|
|
w, ok := ctx.Value(&workspaceClient).(*databricks.WorkspaceClient)
|
|
|
|
if !ok {
|
|
|
|
panic("cannot get *databricks.WorkspaceClient. Please report it as a bug")
|
|
|
|
}
|
|
|
|
return w
|
|
|
|
}
|
|
|
|
|
|
|
|
func AccountClient(ctx context.Context) *databricks.AccountClient {
|
|
|
|
a, ok := ctx.Value(&accountClient).(*databricks.AccountClient)
|
|
|
|
if !ok {
|
|
|
|
panic("cannot get *databricks.AccountClient. Please report it as a bug")
|
|
|
|
}
|
|
|
|
return a
|
|
|
|
}
|
|
|
|
|
|
|
|
func Me(ctx context.Context) *iam.User {
|
|
|
|
me, ok := ctx.Value(¤tUser).(*iam.User)
|
|
|
|
if !ok {
|
|
|
|
panic("cannot get current user. Please report it as a bug")
|
|
|
|
}
|
|
|
|
return me
|
|
|
|
}
|