Merge remote-tracking branch 'databricks/main' into extend-deployment-modes

This commit is contained in:
Lennart Kats 2023-07-15 16:08:44 +02:00
commit d68d054160
49 changed files with 1916 additions and 321 deletions

View File

@@ -44,7 +44,7 @@ var Cmd = &cobra.Command{
{{end}}
// start {{.KebabName}} command
- {{- $useJsonForAllFields := or .IsJsonOnly (and .Request (or (not .Request.IsAllRequiredFieldsPrimitive) .Request.IsAllRequiredFieldsJsonUnserialisable)) -}}
+ {{- $useJsonForAllFields := or .IsJsonOnly (and .Request (or (not .Request.IsAllRequiredFieldsPrimitive) .Request.HasRequiredNonBodyField)) -}}
{{- $needJsonFlag := or $useJsonForAllFields (and .Request (not .Request.IsOnlyPrimitiveFields)) -}}
{{- if .Request}}
var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}}

View File

@@ -1,5 +1,24 @@
# Version changelog

## 0.200.2

CLI:
* Fix secrets put-secret command ([#545](https://github.com/databricks/cli/pull/545)).
* Fixed ignoring required positional parameters when --json flag is provided ([#535](https://github.com/databricks/cli/pull/535)).
* Update cp help message to not require file scheme ([#554](https://github.com/databricks/cli/pull/554)).

Bundles:
* Fix: bundle destroy fails when bundle.tf.json file is deleted ([#519](https://github.com/databricks/cli/pull/519)).
* Fixed error reporting when invalid files are listed in the include section ([#543](https://github.com/databricks/cli/pull/543)).
* Make top level workspace optional in JSON schema ([#562](https://github.com/databricks/cli/pull/562)).
* Propagate TF_CLI_CONFIG_FILE env variable ([#555](https://github.com/databricks/cli/pull/555)).
* Update Terraform provider schema structs ([#563](https://github.com/databricks/cli/pull/563)).
* Update inline JSON schema documentation ([#557](https://github.com/databricks/cli/pull/557)).

Dependencies:
* Bump Go SDK to v0.12.0 ([#540](https://github.com/databricks/cli/pull/540)).
* Bump github.com/hashicorp/terraform-json from 0.17.0 to 0.17.1 ([#541](https://github.com/databricks/cli/pull/541)).

## 0.200.1

CLI:

View File

@@ -1,3 +1,4 @@
<<<<<<< HEAD
package mutator

import (
@@ -179,3 +180,95 @@ func (m *processEnvironmentMode) Apply(ctx context.Context, b *bundle.Bundle) er
return nil
}
||||||| 3354750
=======
package mutator
import (
"context"
"fmt"
"path"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/ml"
)
type processEnvironmentMode struct{}
const developmentConcurrentRuns = 4
func ProcessEnvironmentMode() bundle.Mutator {
return &processEnvironmentMode{}
}
func (m *processEnvironmentMode) Name() string {
return "ProcessEnvironmentMode"
}
// Mark all resources as being for 'development' purposes, i.e.
// changing their name, adding tags, and (in the future)
// marking them as 'hidden' in the UI.
func processDevelopmentMode(b *bundle.Bundle) error {
r := b.Config.Resources
for i := range r.Jobs {
r.Jobs[i].Name = "[dev] " + r.Jobs[i].Name
if r.Jobs[i].Tags == nil {
r.Jobs[i].Tags = make(map[string]string)
}
r.Jobs[i].Tags["dev"] = ""
if r.Jobs[i].MaxConcurrentRuns == 0 {
r.Jobs[i].MaxConcurrentRuns = developmentConcurrentRuns
}
if r.Jobs[i].Schedule != nil {
r.Jobs[i].Schedule.PauseStatus = jobs.PauseStatusPaused
}
if r.Jobs[i].Continuous != nil {
r.Jobs[i].Continuous.PauseStatus = jobs.PauseStatusPaused
}
if r.Jobs[i].Trigger != nil {
r.Jobs[i].Trigger.PauseStatus = jobs.PauseStatusPaused
}
}
for i := range r.Pipelines {
r.Pipelines[i].Name = "[dev] " + r.Pipelines[i].Name
r.Pipelines[i].Development = true
// (pipelines don't yet support tags)
}
for i := range r.Models {
r.Models[i].Name = "[dev] " + r.Models[i].Name
r.Models[i].Tags = append(r.Models[i].Tags, ml.ModelTag{Key: "dev", Value: ""})
}
for i := range r.Experiments {
filepath := r.Experiments[i].Name
dir := path.Dir(filepath)
base := path.Base(filepath)
if dir == "." {
r.Experiments[i].Name = "[dev] " + base
} else {
r.Experiments[i].Name = dir + "/[dev] " + base
}
r.Experiments[i].Tags = append(r.Experiments[i].Tags, ml.ExperimentTag{Key: "dev", Value: ""})
}
return nil
}
func (m *processEnvironmentMode) Apply(ctx context.Context, b *bundle.Bundle) error {
switch b.Config.Bundle.Mode {
case config.Development:
return processDevelopmentMode(b)
case "":
// No action
default:
return fmt.Errorf("unsupported value specified for 'mode': %s", b.Config.Bundle.Mode)
}
return nil
}
>>>>>>> databricks/main
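The development-mode renaming above treats experiment names as workspace paths: the "[dev] " marker goes on the base name, not the full path. A minimal standalone sketch of that rule (prefixDevName is a hypothetical helper; the mutator inlines this logic):

```go
package main

import (
	"fmt"
	"path"
)

// prefixDevName mirrors the experiment-renaming logic above: the "[dev] "
// prefix is inserted after the directory part, so workspace paths stay valid.
func prefixDevName(name string) string {
	dir := path.Dir(name)
	base := path.Base(name)
	if dir == "." {
		return "[dev] " + base
	}
	return dir + "/[dev] " + base
}

func main() {
	fmt.Println(prefixDevName("experiment2"))
	// [dev] experiment2
	fmt.Println(prefixDevName("/Users/lennart.kats@databricks.com/experiment1"))
	// /Users/lennart.kats@databricks.com/[dev] experiment1
}
```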

View File

@@ -1,3 +1,4 @@
<<<<<<< HEAD
package mutator

import (
@@ -157,3 +158,83 @@ func TestAllResourcesRenamed(t *testing.T) {
}
}
}
||||||| 3354750
=======
package mutator_test
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/ml"
"github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProcessEnvironmentModeApplyDebug(t *testing.T) {
bundle := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Mode: config.Development,
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {JobSettings: &jobs.JobSettings{Name: "job1"}},
},
Pipelines: map[string]*resources.Pipeline{
"pipeline1": {PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"}},
},
Experiments: map[string]*resources.MlflowExperiment{
"experiment1": {Experiment: &ml.Experiment{Name: "/Users/lennart.kats@databricks.com/experiment1"}},
"experiment2": {Experiment: &ml.Experiment{Name: "experiment2"}},
},
Models: map[string]*resources.MlflowModel{
"model1": {Model: &ml.Model{Name: "model1"}},
},
},
},
}
m := mutator.ProcessEnvironmentMode()
err := m.Apply(context.Background(), bundle)
require.NoError(t, err)
assert.Equal(t, "[dev] job1", bundle.Config.Resources.Jobs["job1"].Name)
assert.Equal(t, "[dev] pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name)
assert.Equal(t, "/Users/lennart.kats@databricks.com/[dev] experiment1", bundle.Config.Resources.Experiments["experiment1"].Name)
assert.Equal(t, "[dev] experiment2", bundle.Config.Resources.Experiments["experiment2"].Name)
assert.Equal(t, "[dev] model1", bundle.Config.Resources.Models["model1"].Name)
assert.Equal(t, "dev", bundle.Config.Resources.Experiments["experiment1"].Experiment.Tags[0].Key)
assert.True(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
}
func TestProcessEnvironmentModeApplyDefault(t *testing.T) {
bundle := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Mode: "",
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {JobSettings: &jobs.JobSettings{Name: "job1"}},
},
Pipelines: map[string]*resources.Pipeline{
"pipeline1": {PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"}},
},
},
},
}
m := mutator.ProcessEnvironmentMode()
err := m.Apply(context.Background(), bundle)
require.NoError(t, err)
assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name)
assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name)
assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
}
>>>>>>> databricks/main

View File

@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"path/filepath"
"strings"

"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
@@ -49,6 +50,12 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error
return err
}

// If the entry is not a glob pattern and no matches found,
// return an error because the file defined is not found
if len(matches) == 0 && !strings.ContainsAny(entry, "*?[") {
return fmt.Errorf("%s defined in 'include' section does not match any files", entry)
}

// Filter matches to ones we haven't seen yet.
var includes []string
for _, match := range matches {
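The new check above distinguishes literal include entries from glob patterns by looking for glob metacharacters. A minimal sketch of the same idea, wrapped in a hypothetical resolveInclude helper around filepath.Glob:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// resolveInclude mirrors the check above: a literal entry with no matches is
// an error, while a glob pattern is allowed to match nothing.
// (resolveInclude is a hypothetical helper; the mutator inlines this logic.)
func resolveInclude(root, entry string) ([]string, error) {
	matches, err := filepath.Glob(filepath.Join(root, entry))
	if err != nil {
		return nil, err
	}
	if len(matches) == 0 && !strings.ContainsAny(entry, "*?[") {
		return nil, fmt.Errorf("%s defined in 'include' section does not match any files", entry)
	}
	return matches, nil
}

func main() {
	// A glob that matches nothing is fine; a literal that matches nothing is not.
	fmt.Println(resolveInclude(".", "*.nomatch.yml")) // [] <nil>
	fmt.Println(resolveInclude(".", "notexist.yml"))  // [] plus an error
}
```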

View File

@@ -108,3 +108,17 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, []string{"a.yml"}, bundle.Config.Include)
}
func TestProcessRootIncludesNotExists(t *testing.T) {
bundle := &bundle.Bundle{
Config: config.Root{
Path: t.TempDir(),
Include: []string{
"notexist.yml",
},
},
}
err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle)
require.Error(t, err)
assert.Contains(t, err.Error(), "notexist.yml defined in 'include' section does not match any files")
}

View File

@@ -2,6 +2,7 @@ package mutator
import (
"context"
"errors"
"fmt"
"os"
"path"
@@ -14,6 +15,22 @@ import (
"github.com/databricks/databricks-sdk-go/service/pipelines"
)
type ErrIsNotebook struct {
path string
}
func (err ErrIsNotebook) Error() string {
return fmt.Sprintf("file at %s is a notebook", err.path)
}
type ErrIsNotNotebook struct {
path string
}
func (err ErrIsNotNotebook) Error() string {
return fmt.Sprintf("file at %s is not a notebook", err.path)
}
type translatePaths struct {
seen map[string]string
}
@@ -86,7 +103,7 @@ func (m *translatePaths) translateNotebookPath(literal, localPath, remotePath st
return "", fmt.Errorf("unable to determine if %s is a notebook: %w", localPath, err)
}
if !nb {
- return "", fmt.Errorf("file at %s is not a notebook", localPath)
+ return "", ErrIsNotNotebook{localPath}
}

// Upon import, notebooks are stripped of their extension.
@@ -94,14 +111,16 @@
}

func (m *translatePaths) translateFilePath(literal, localPath, remotePath string) (string, error) {
- _, err := os.Stat(localPath)
+ nb, _, err := notebook.Detect(localPath)
if os.IsNotExist(err) {
return "", fmt.Errorf("file %s not found", literal)
}
if err != nil {
- return "", fmt.Errorf("unable to access %s: %w", localPath, err)
+ return "", fmt.Errorf("unable to determine if %s is not a notebook: %w", localPath, err)
}
if nb {
return "", ErrIsNotebook{localPath}
}
return remotePath, nil
}
@@ -110,6 +129,9 @@ func (m *translatePaths) translateJobTask(dir string, b *bundle.Bundle, task *jo
if task.NotebookTask != nil {
err = m.rewritePath(dir, b, &task.NotebookTask.NotebookPath, m.translateNotebookPath)
if target := (&ErrIsNotNotebook{}); errors.As(err, target) {
return fmt.Errorf(`expected a notebook for "tasks.notebook_task.notebook_path" but got a file: %w`, target)
}
if err != nil {
return err
}
@@ -117,6 +139,9 @@ func (m *translatePaths) translateJobTask(dir string, b *bundle.Bundle, task *jo
if task.SparkPythonTask != nil {
err = m.rewritePath(dir, b, &task.SparkPythonTask.PythonFile, m.translateFilePath)
if target := (&ErrIsNotebook{}); errors.As(err, target) {
return fmt.Errorf(`expected a file for "tasks.spark_python_task.python_file" but got a notebook: %w`, target)
}
if err != nil {
return err
}
@@ -130,6 +155,9 @@ func (m *translatePaths) translatePipelineLibrary(dir string, b *bundle.Bundle,
if library.Notebook != nil {
err = m.rewritePath(dir, b, &library.Notebook.Path, m.translateNotebookPath)
if target := (&ErrIsNotNotebook{}); errors.As(err, target) {
return fmt.Errorf(`expected a notebook for "libraries.notebook.path" but got a file: %w`, target)
}
if err != nil {
return err
}
@@ -137,6 +165,9 @@
if library.File != nil {
err = m.rewritePath(dir, b, &library.File.Path, m.translateFilePath)
if target := (&ErrIsNotebook{}); errors.As(err, target) {
return fmt.Errorf(`expected a file for "libraries.file.path" but got a notebook: %w`, target)
}
if err != nil {
return err
}
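The errors.As pattern above lets rewritePath return typed errors that callers translate into field-specific messages. A self-contained sketch of the detection mechanics (only ErrIsNotebook shown; the real mutator defines both error types):

```go
package main

import (
	"errors"
	"fmt"
)

type ErrIsNotebook struct {
	path string
}

func (err ErrIsNotebook) Error() string {
	return fmt.Sprintf("file at %s is a notebook", err.path)
}

func main() {
	// translateFilePath returns the error by value; errors.As still matches
	// it against a pointer target, as in translateJobTask above.
	err := fmt.Errorf("translating path: %w", ErrIsNotebook{"./my_notebook.py"})
	if target := (&ErrIsNotebook{}); errors.As(err, target) {
		fmt.Printf("expected a file but got a notebook: %v\n", target)
	}
}
```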

View File

@@ -455,3 +455,143 @@ func TestPipelineFileDoesNotExistError(t *testing.T) {
err := mutator.TranslatePaths().Apply(context.Background(), bundle)
assert.EqualError(t, err, "file ./doesnt_exist.py not found")
}
func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) {
dir := t.TempDir()
touchNotebookFile(t, filepath.Join(dir, "my_notebook.py"))
bundle := &bundle.Bundle{
Config: config.Root{
Path: dir,
Workspace: config.Workspace{
FilesPath: "/bundle",
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job": {
Paths: resources.Paths{
ConfigFilePath: filepath.Join(dir, "resource.yml"),
},
JobSettings: &jobs.JobSettings{
Tasks: []jobs.Task{
{
SparkPythonTask: &jobs.SparkPythonTask{
PythonFile: "./my_notebook.py",
},
},
},
},
},
},
},
},
}
err := mutator.TranslatePaths().Apply(context.Background(), bundle)
assert.ErrorContains(t, err, `expected a file for "tasks.spark_python_task.python_file" but got a notebook`)
}
func TestJobNotebookTaskWithFileSourceError(t *testing.T) {
dir := t.TempDir()
touchEmptyFile(t, filepath.Join(dir, "my_file.py"))
bundle := &bundle.Bundle{
Config: config.Root{
Path: dir,
Workspace: config.Workspace{
FilesPath: "/bundle",
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job": {
Paths: resources.Paths{
ConfigFilePath: filepath.Join(dir, "resource.yml"),
},
JobSettings: &jobs.JobSettings{
Tasks: []jobs.Task{
{
NotebookTask: &jobs.NotebookTask{
NotebookPath: "./my_file.py",
},
},
},
},
},
},
},
},
}
err := mutator.TranslatePaths().Apply(context.Background(), bundle)
assert.ErrorContains(t, err, `expected a notebook for "tasks.notebook_task.notebook_path" but got a file`)
}
func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) {
dir := t.TempDir()
touchEmptyFile(t, filepath.Join(dir, "my_file.py"))
bundle := &bundle.Bundle{
Config: config.Root{
Path: dir,
Workspace: config.Workspace{
FilesPath: "/bundle",
},
Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{
"pipeline": {
Paths: resources.Paths{
ConfigFilePath: filepath.Join(dir, "resource.yml"),
},
PipelineSpec: &pipelines.PipelineSpec{
Libraries: []pipelines.PipelineLibrary{
{
Notebook: &pipelines.NotebookLibrary{
Path: "./my_file.py",
},
},
},
},
},
},
},
},
}
err := mutator.TranslatePaths().Apply(context.Background(), bundle)
assert.ErrorContains(t, err, `expected a notebook for "libraries.notebook.path" but got a file`)
}
func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) {
dir := t.TempDir()
touchNotebookFile(t, filepath.Join(dir, "my_notebook.py"))
bundle := &bundle.Bundle{
Config: config.Root{
Path: dir,
Workspace: config.Workspace{
FilesPath: "/bundle",
},
Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{
"pipeline": {
Paths: resources.Paths{
ConfigFilePath: filepath.Join(dir, "resource.yml"),
},
PipelineSpec: &pipelines.PipelineSpec{
Libraries: []pipelines.PipelineLibrary{
{
File: &pipelines.FileLibrary{
Path: "./my_notebook.py",
},
},
},
},
},
},
},
},
}
err := mutator.TranslatePaths().Apply(context.Background(), bundle)
assert.ErrorContains(t, err, `expected a file for "libraries.file.path" but got a notebook`)
}

View File

@@ -36,7 +36,7 @@ type Root struct {
// Workspace contains details about the workspace to connect to
// and paths in the workspace tree to use for this bundle.
- Workspace Workspace `json:"workspace"`
+ Workspace Workspace `json:"workspace,omitempty"`

// Artifacts contains a description of all code artifacts in this bundle.
Artifacts map[string]*Artifact `json:"artifacts,omitempty"`
@@ -118,7 +118,7 @@ func (r *Root) Load(path string) error {
}
err = yaml.Unmarshal(raw, r)
if err != nil {
- return err
+ return fmt.Errorf("failed to load %s: %w", path, err)
}

r.Path = filepath.Dir(path)
@@ -190,6 +190,7 @@ func (r *Root) MergeEnvironment(env *Environment) error {
}
}
<<<<<<< HEAD
if env.Mode != "" {
r.Bundle.Mode = env.Mode
}
@@ -209,5 +210,16 @@ func (r *Root) MergeEnvironment(env *Environment) error {
r.Bundle.Git.OriginURL = env.Git.OriginURL
}
||||||| 3354750
=======
if env.Mode != "" {
r.Bundle.Mode = env.Mode
}
if env.ComputeID != "" {
r.Bundle.ComputeID = env.ComputeID
}
>>>>>>> databricks/main
return nil
}

View File

@@ -82,12 +82,9 @@ func (w *Workspace) Client() (*databricks.WorkspaceClient, error) {
AzureLoginAppID: w.AzureLoginAppID,
}

- // HACKY fix to not used host based auth when the profile is already set
- profile := os.Getenv("DATABRICKS_CONFIG_PROFILE")
// If only the host is configured, we try and unambiguously match it to
// a profile in the user's databrickscfg file. Override the default loaders.
- if w.Host != "" && w.Profile == "" && profile == "" {
+ if w.Host != "" && w.Profile == "" {
cfg.Loaders = []config.Loader{
// Load auth creds from env vars
config.ConfigAttributes,
@@ -98,6 +95,13 @@ func (w *Workspace) Client() (*databricks.WorkspaceClient, error) {
}
}

if w.Profile != "" && w.Host != "" {
err := databrickscfg.ValidateConfigAndProfileHost(&cfg, w.Profile)
if err != nil {
return nil, err
}
}

return databricks.NewWorkspaceClient(&cfg)
}
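A rough sketch of what a host/profile validation step like ValidateConfigAndProfileHost has to do, using the gopkg.in/ini.v1 package already in go.mod. validateProfileHost is a hypothetical stand-in, not the SDK implementation; the error strings mirror the ones asserted in cmd/root/bundle_test.go below:

```go
package main

import (
	"fmt"
	"strings"

	"gopkg.in/ini.v1"
)

// validateProfileHost checks that the named profile in a .databrickscfg-style
// file points at the expected host (hypothetical stand-in).
func validateProfileHost(cfgPath, profile, host string) error {
	f, err := ini.Load(cfgPath)
	if err != nil {
		return err
	}
	section, err := f.GetSection(profile)
	if err != nil {
		return fmt.Errorf("no matching config profiles found")
	}
	profileHost := section.Key("host").String()
	if !strings.EqualFold(strings.TrimSuffix(profileHost, "/"), strings.TrimSuffix(host, "/")) {
		return fmt.Errorf("config host mismatch: profile uses host %s, but CLI configured to use %s", profileHost, host)
	}
	return nil
}

func main() {
	fmt.Println(validateProfileHost("/home/me/.databrickscfg", "PROFILE-1", "https://x.com"))
}
```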

View File

@@ -70,6 +70,23 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con
return tf.ExecPath, nil
}
// This function inherits some environment variables for Terraform CLI.
func inheritEnvVars(env map[string]string) error {
// Include $HOME in set of environment variables to pass along.
home, ok := os.LookupEnv("HOME")
if ok {
env["HOME"] = home
}
// Include $TF_CLI_CONFIG_FILE to override terraform provider in development.
configFile, ok := os.LookupEnv("TF_CLI_CONFIG_FILE")
if ok {
env["TF_CLI_CONFIG_FILE"] = configFile
}
return nil
}
// This function sets temp dir location for terraform to use. If user does not
// specify anything here, we fall back to a `tmp` directory in the bundle's cache
// directory
@@ -145,10 +162,9 @@ func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error {
return err
}

- // Include $HOME in set of environment variables to pass along.
- home, ok := os.LookupEnv("HOME")
- if ok {
- env["HOME"] = home
- }
+ err = inheritEnvVars(env)
+ if err != nil {
+ return err
+ }

// Set the temporary directory environment variables
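inheritEnvVars copies an allowlist of variables from the parent environment into the Terraform child environment. A generalized sketch of the same pattern (copyEnvVars is a hypothetical helper):

```go
package main

import (
	"fmt"
	"os"
)

// copyEnvVars generalizes the inheritEnvVars pattern above: copy each listed
// variable into the child environment only when it is set in the parent.
func copyEnvVars(names []string, env map[string]string) {
	for _, name := range names {
		if value, ok := os.LookupEnv(name); ok {
			env[name] = value
		}
	}
}

func main() {
	env := map[string]string{}
	copyEnvVars([]string{"HOME", "TF_CLI_CONFIG_FILE"}, env)
	fmt.Println(env)
}
```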

View File

@@ -272,3 +272,19 @@ func TestSetProxyEnvVars(t *testing.T) {
require.NoError(t, err)
assert.ElementsMatch(t, []string{"HTTP_PROXY", "HTTPS_PROXY", "NO_PROXY"}, maps.Keys(env))
}
func TestInheritEnvVars(t *testing.T) {
env := map[string]string{}
t.Setenv("HOME", "/home/testuser")
t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc")
err := inheritEnvVars(env)
require.NoError(t, err)
require.Equal(t, map[string]string{
"HOME": "/home/testuser",
"TF_CLI_CONFIG_FILE": "/tmp/config.tfrc",
}, env)
}

View File

@@ -14,6 +14,7 @@ type Config struct {
AzureWorkspaceResourceId string `json:"azure_workspace_resource_id,omitempty"`
ClientId string `json:"client_id,omitempty"`
ClientSecret string `json:"client_secret,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ConfigFile string `json:"config_file,omitempty"`
DatabricksCliPath string `json:"databricks_cli_path,omitempty"`
DebugHeaders bool `json:"debug_headers,omitempty"`
@@ -30,4 +31,5 @@ type Config struct {
SkipVerify bool `json:"skip_verify,omitempty"`
Token string `json:"token,omitempty"`
Username string `json:"username,omitempty"`
WarehouseId string `json:"warehouse_id,omitempty"`
}

View File

@@ -2,6 +2,15 @@
package schema

type DataSourceJobJobSettingsSettingsComputeSpec struct {
Kind string `json:"kind,omitempty"`
}

type DataSourceJobJobSettingsSettingsCompute struct {
ComputeKey string `json:"compute_key,omitempty"`
Spec *DataSourceJobJobSettingsSettingsComputeSpec `json:"spec,omitempty"`
}

type DataSourceJobJobSettingsSettingsContinuous struct {
PauseStatus string `json:"pause_status,omitempty"`
}
@@ -415,6 +424,12 @@ type DataSourceJobJobSettingsSettingsSparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}

type DataSourceJobJobSettingsSettingsTaskConditionTask struct {
Left string `json:"left,omitempty"`
Op string `json:"op,omitempty"`
Right string `json:"right,omitempty"`
}

type DataSourceJobJobSettingsSettingsTaskDbtTask struct {
Catalog string `json:"catalog,omitempty"`
Commands []string `json:"commands"`
@@ -425,7 +440,8 @@ type DataSourceJobJobSettingsSettingsTaskDbtTask struct {
}

type DataSourceJobJobSettingsSettingsTaskDependsOn struct {
- TaskKey string `json:"task_key,omitempty"`
+ Outcome string `json:"outcome,omitempty"`
+ TaskKey string `json:"task_key"`
}

type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct {
@@ -645,12 +661,27 @@ type DataSourceJobJobSettingsSettingsTaskSparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}

type DataSourceJobJobSettingsSettingsTaskSqlTaskAlertSubscriptions struct {
DestinationId string `json:"destination_id,omitempty"`
UserName string `json:"user_name,omitempty"`
}

type DataSourceJobJobSettingsSettingsTaskSqlTaskAlert struct {
AlertId string `json:"alert_id"`
PauseSubscriptions bool `json:"pause_subscriptions,omitempty"`
Subscriptions []DataSourceJobJobSettingsSettingsTaskSqlTaskAlertSubscriptions `json:"subscriptions,omitempty"`
}

type DataSourceJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptions struct {
DestinationId string `json:"destination_id,omitempty"`
UserName string `json:"user_name,omitempty"`
}

type DataSourceJobJobSettingsSettingsTaskSqlTaskDashboard struct {
CustomSubject string `json:"custom_subject,omitempty"`
DashboardId string `json:"dashboard_id"`
PauseSubscriptions bool `json:"pause_subscriptions,omitempty"`
Subscriptions []DataSourceJobJobSettingsSettingsTaskSqlTaskDashboardSubscriptions `json:"subscriptions,omitempty"`
}

type DataSourceJobJobSettingsSettingsTaskSqlTaskFile struct {
@@ -671,6 +702,7 @@ type DataSourceJobJobSettingsSettingsTaskSqlTask struct {
}

type DataSourceJobJobSettingsSettingsTask struct {
ComputeKey string `json:"compute_key,omitempty"`
Description string `json:"description,omitempty"`
ExistingClusterId string `json:"existing_cluster_id,omitempty"`
JobClusterKey string `json:"job_cluster_key,omitempty"`
@@ -680,6 +712,7 @@ type DataSourceJobJobSettingsSettingsTask struct {
RunIf string `json:"run_if,omitempty"`
TaskKey string `json:"task_key,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
ConditionTask *DataSourceJobJobSettingsSettingsTaskConditionTask `json:"condition_task,omitempty"`
DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"`
DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"`
EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"`
@@ -695,9 +728,9 @@ type DataSourceJobJobSettingsSettingsTask struct {
}

type DataSourceJobJobSettingsSettingsTriggerFileArrival struct {
- MinTimeBetweenTriggerSeconds int `json:"min_time_between_trigger_seconds,omitempty"`
+ MinTimeBetweenTriggersSeconds int `json:"min_time_between_triggers_seconds,omitempty"`
Url string `json:"url"`
WaitAfterLastChangeSeconds int `json:"wait_after_last_change_seconds,omitempty"`
}

type DataSourceJobJobSettingsSettingsTrigger struct {
@@ -733,6 +766,7 @@ type DataSourceJobJobSettingsSettings struct {
RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
Tags map[string]string `json:"tags,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
Compute []DataSourceJobJobSettingsSettingsCompute `json:"compute,omitempty"`
Continuous *DataSourceJobJobSettingsSettingsContinuous `json:"continuous,omitempty"`
DbtTask *DataSourceJobJobSettingsSettingsDbtTask `json:"dbt_task,omitempty"`
EmailNotifications *DataSourceJobJobSettingsSettingsEmailNotifications `json:"email_notifications,omitempty"`

View File

@@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type ResourceCatalogWorkspaceBinding struct {
CatalogName string `json:"catalog_name"`
Id string `json:"id,omitempty"`
WorkspaceId string `json:"workspace_id"`
}
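Since bundle deployment serializes these generated structs into bundle.tf.json, omitempty controls which keys appear. A minimal sketch of how the new resource serializes (a standalone copy of the struct, for illustration only):

```go
package main

import (
	"encoding/json"
	"fmt"
)

type ResourceCatalogWorkspaceBinding struct {
	CatalogName string `json:"catalog_name"`
	Id          string `json:"id,omitempty"`
	WorkspaceId string `json:"workspace_id"`
}

func main() {
	// With omitempty, an unset Id is dropped from the generated JSON.
	b, _ := json.Marshal(ResourceCatalogWorkspaceBinding{
		CatalogName: "main",
		WorkspaceId: "1234567890",
	})
	fmt.Println(string(b))
	// {"catalog_name":"main","workspace_id":"1234567890"}
}
```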

View File

@@ -19,5 +19,6 @@ type ResourceGrants struct {
StorageCredential string `json:"storage_credential,omitempty"`
Table string `json:"table,omitempty"`
View string `json:"view,omitempty"`
Volume string `json:"volume,omitempty"`
Grant []ResourceGrantsGrant `json:"grant,omitempty"`
}

View File

@@ -2,6 +2,15 @@
package schema

type ResourceJobComputeSpec struct {
Kind string `json:"kind,omitempty"`
}

type ResourceJobCompute struct {
ComputeKey string `json:"compute_key,omitempty"`
Spec *ResourceJobComputeSpec `json:"spec,omitempty"`
}

type ResourceJobContinuous struct {
PauseStatus string `json:"pause_status,omitempty"`
}
@@ -415,6 +424,12 @@ type ResourceJobSparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}

type ResourceJobTaskConditionTask struct {
Left string `json:"left,omitempty"`
Op string `json:"op,omitempty"`
Right string `json:"right,omitempty"`
}

type ResourceJobTaskDbtTask struct {
Catalog string `json:"catalog,omitempty"`
Commands []string `json:"commands"`
@@ -425,7 +440,8 @@ type ResourceJobTaskDbtTask struct {
}

type ResourceJobTaskDependsOn struct {
- TaskKey string `json:"task_key,omitempty"`
+ Outcome string `json:"outcome,omitempty"`
+ TaskKey string `json:"task_key"`
}

type ResourceJobTaskEmailNotifications struct {
@@ -645,12 +661,27 @@ type ResourceJobTaskSparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}

type ResourceJobTaskSqlTaskAlertSubscriptions struct {
DestinationId string `json:"destination_id,omitempty"`
UserName string `json:"user_name,omitempty"`
}

type ResourceJobTaskSqlTaskAlert struct {
AlertId string `json:"alert_id"`
PauseSubscriptions bool `json:"pause_subscriptions,omitempty"`
Subscriptions []ResourceJobTaskSqlTaskAlertSubscriptions `json:"subscriptions,omitempty"`
}

type ResourceJobTaskSqlTaskDashboardSubscriptions struct {
DestinationId string `json:"destination_id,omitempty"`
UserName string `json:"user_name,omitempty"`
}

type ResourceJobTaskSqlTaskDashboard struct {
CustomSubject string `json:"custom_subject,omitempty"`
DashboardId string `json:"dashboard_id"`
PauseSubscriptions bool `json:"pause_subscriptions,omitempty"`
Subscriptions []ResourceJobTaskSqlTaskDashboardSubscriptions `json:"subscriptions,omitempty"`
}

type ResourceJobTaskSqlTaskFile struct {
@@ -671,6 +702,7 @@ type ResourceJobTaskSqlTask struct {
}

type ResourceJobTask struct {
ComputeKey string `json:"compute_key,omitempty"`
Description string `json:"description,omitempty"`
ExistingClusterId string `json:"existing_cluster_id,omitempty"`
JobClusterKey string `json:"job_cluster_key,omitempty"`
@@ -680,6 +712,7 @@ type ResourceJobTask struct {
RunIf string `json:"run_if,omitempty"`
TaskKey string `json:"task_key,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
ConditionTask *ResourceJobTaskConditionTask `json:"condition_task,omitempty"`
DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"`
DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"`
EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"`
@@ -695,9 +728,9 @@ type ResourceJobTask struct {
}

type ResourceJobTriggerFileArrival struct {
- MinTimeBetweenTriggerSeconds int `json:"min_time_between_trigger_seconds,omitempty"`
+ MinTimeBetweenTriggersSeconds int `json:"min_time_between_triggers_seconds,omitempty"`
Url string `json:"url"`
WaitAfterLastChangeSeconds int `json:"wait_after_last_change_seconds,omitempty"`
}

type ResourceJobTrigger struct {
@@ -736,6 +769,7 @@ type ResourceJob struct {
Tags map[string]string `json:"tags,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
Url string `json:"url,omitempty"`
Compute []ResourceJobCompute `json:"compute,omitempty"`
Continuous *ResourceJobContinuous `json:"continuous,omitempty"`
DbtTask *ResourceJobDbtTask `json:"dbt_task,omitempty"`
EmailNotifications *ResourceJobEmailNotifications `json:"email_notifications,omitempty"`

View File

@@ -3,11 +3,12 @@
package schema

type ResourceModelServingConfigServedModels struct {
EnvironmentVars map[string]string `json:"environment_vars,omitempty"`
ModelName string `json:"model_name"`
ModelVersion string `json:"model_version"`
Name string `json:"name,omitempty"`
ScaleToZeroEnabled bool `json:"scale_to_zero_enabled,omitempty"`
WorkloadSize string `json:"workload_size"`
}

type ResourceModelServingConfigTrafficConfigRoutes struct {

View File

@@ -8,6 +8,7 @@ type ResourceServicePrincipal struct {
AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
ApplicationId string `json:"application_id,omitempty"`
DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"`
DisableAsUserDeletion bool `json:"disable_as_user_deletion,omitempty"`
DisplayName string `json:"display_name,omitempty"`
ExternalId string `json:"external_id,omitempty"`
Force bool `json:"force,omitempty"`

View File

@@ -5,6 +5,7 @@ package schema
type ResourceSqlGlobalConfig struct {
DataAccessConfig map[string]string `json:"data_access_config,omitempty"`
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
Id string `json:"id,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SecurityPolicy string `json:"security_policy,omitempty"`

View File

@@ -7,6 +7,7 @@ type ResourceUser struct {
AllowClusterCreate bool `json:"allow_cluster_create,omitempty"`
AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"`
DisableAsUserDeletion bool `json:"disable_as_user_deletion,omitempty"`
DisplayName string `json:"display_name,omitempty"`
ExternalId string `json:"external_id,omitempty"`
Force bool `json:"force,omitempty"`

View File

@@ -8,6 +8,7 @@ type Resources struct {
AzureAdlsGen2Mount map[string]*ResourceAzureAdlsGen2Mount `json:"databricks_azure_adls_gen2_mount,omitempty"`
AzureBlobMount map[string]*ResourceAzureBlobMount `json:"databricks_azure_blob_mount,omitempty"`
Catalog map[string]*ResourceCatalog `json:"databricks_catalog,omitempty"`
CatalogWorkspaceBinding map[string]*ResourceCatalogWorkspaceBinding `json:"databricks_catalog_workspace_binding,omitempty"`
Cluster map[string]*ResourceCluster `json:"databricks_cluster,omitempty"`
ClusterPolicy map[string]*ResourceClusterPolicy `json:"databricks_cluster_policy,omitempty"`
DbfsFile map[string]*ResourceDbfsFile `json:"databricks_dbfs_file,omitempty"`
@@ -86,6 +87,7 @@ func NewResources() *Resources {
AzureAdlsGen2Mount: make(map[string]*ResourceAzureAdlsGen2Mount),
AzureBlobMount: make(map[string]*ResourceAzureBlobMount),
Catalog: make(map[string]*ResourceCatalog),
CatalogWorkspaceBinding: make(map[string]*ResourceCatalogWorkspaceBinding),
Cluster: make(map[string]*ResourceCluster),
ClusterPolicy: make(map[string]*ResourceClusterPolicy),
DbfsFile: make(map[string]*ResourceDbfsFile),

View File

@@ -14,6 +14,8 @@ func Destroy() bundle.Mutator {
lock.Acquire(),
bundle.Defer(
bundle.Seq(
terraform.Interpolate(),
terraform.Write(),
terraform.StatePull(),
terraform.Plan(terraform.PlanGoal("destroy")),
terraform.Destroy(),
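The destroy sequence wraps the Terraform steps in Defer so that cleanup (presumably releasing the deployment lock) runs even when a step fails. A self-contained sketch of the Seq/Defer idea; this is not the real bundle package, just the control-flow pattern:

```go
package main

import "fmt"

// step stands in for a bundle mutator in this sketch.
type step func() error

// seq runs steps in order, stopping at the first error.
func seq(steps ...step) step {
	return func() error {
		for _, s := range steps {
			if err := s(); err != nil {
				return err
			}
		}
		return nil
	}
}

// deferStep runs finally even when body fails, preferring body's error.
func deferStep(body, finally step) step {
	return func() error {
		err := body()
		if ferr := finally(); err == nil {
			err = ferr
		}
		return err
	}
}

func main() {
	destroy := seq(
		func() error { fmt.Println("lock.Acquire"); return nil },
		deferStep(
			seq(
				func() error { fmt.Println("terraform.Interpolate"); return nil },
				func() error { fmt.Println("terraform.Write"); return nil },
				func() error { fmt.Println("terraform.Destroy"); return nil },
			),
			func() error { fmt.Println("lock.Release"); return nil },
		),
	)
	_ = destroy()
}
```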

File diff suppressed because it is too large.

View File

@@ -162,7 +162,7 @@ func (reader *OpenapiReader) jobsDocs() (*Docs, error) {
// TODO: add description for id if needed.
// Tracked in https://github.com/databricks/cli/issues/242
jobsDocs := &Docs{
- Description: "List of job definations",
+ Description: "List of Databricks jobs",
AdditionalProperties: jobDocs,
}
return jobsDocs, nil
@@ -177,12 +177,38 @@ func (reader *OpenapiReader) pipelinesDocs() (*Docs, error) {
// TODO: Two fields in resources.Pipeline have the json tag id. Clarify the
// semantics and then add a description if needed. (https://github.com/databricks/cli/issues/242)
pipelinesDocs := &Docs{
- Description: "List of pipeline definations",
+ Description: "List of DLT pipelines",
AdditionalProperties: pipelineDocs,
}
return pipelinesDocs, nil
}
func (reader *OpenapiReader) experimentsDocs() (*Docs, error) {
experimentSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "ml.Experiment")
if err != nil {
return nil, err
}
experimentDocs := schemaToDocs(experimentSpecSchema)
experimentsDocs := &Docs{
Description: "List of MLflow experiments",
AdditionalProperties: experimentDocs,
}
return experimentsDocs, nil
}
func (reader *OpenapiReader) modelsDocs() (*Docs, error) {
modelSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "ml.Model")
if err != nil {
return nil, err
}
modelDocs := schemaToDocs(modelSpecSchema)
modelsDocs := &Docs{
Description: "List of MLflow models",
AdditionalProperties: modelDocs,
}
return modelsDocs, nil
}
func (reader *OpenapiReader) ResourcesDocs() (*Docs, error) {
jobsDocs, err := reader.jobsDocs()
if err != nil {
@@ -192,12 +218,22 @@ func (reader *OpenapiReader) ResourcesDocs() (*Docs, error) {
if err != nil {
return nil, err
}
experimentsDocs, err := reader.experimentsDocs()
if err != nil {
return nil, err
}
modelsDocs, err := reader.modelsDocs()
if err != nil {
return nil, err
}

return &Docs{
- Description: "Specification of databricks resources to instantiate",
+ Description: "Collection of Databricks resources to deploy.",
Properties: map[string]*Docs{
"jobs": jobsDocs,
"pipelines": pipelinesDocs,
"experiments": experimentsDocs,
"models": modelsDocs,
},
}, nil
}

View File

@@ -0,0 +1,5 @@
bundle:
  name: include_invalid

include:
  - notexists.yml

View File

@@ -0,0 +1,34 @@
package config_tests
import (
"context"
"path/filepath"
"sort"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"golang.org/x/exp/maps"
)
func TestIncludeInvalid(t *testing.T) {
b, err := bundle.Load("./include_invalid")
require.NoError(t, err)
err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...))
require.Error(t, err)
assert.Contains(t, err.Error(), "notexists.yml defined in 'include' section does not match any files")
}
func TestIncludeWithGlob(t *testing.T) {
b := load(t, "./include_with_glob")
keys := maps.Keys(b.Config.Resources.Jobs)
sort.Strings(keys)
assert.Equal(t, []string{"my_job"}, keys)
job := b.Config.Resources.Jobs["my_job"]
assert.Equal(t, "1", job.ID)
assert.Equal(t, "include_with_glob/job.yml", filepath.ToSlash(job.ConfigFilePath))
}

View File

@@ -0,0 +1,7 @@
bundle:
  name: include_with_glob

include:
  - "*.yml"
  - "?.yml"
  - "[a-z].yml"

View File

@@ -0,0 +1,4 @@
resources:
  jobs:
    my_job:
      id: 1

View File

@@ -1,8 +1,11 @@
package auth

import (
"context"

"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/auth"
"github.com/databricks/cli/libs/cmdio"
"github.com/spf13/cobra"
)
@@ -11,10 +14,36 @@ var authCmd = &cobra.Command{
Short: "Authentication related commands",
}

- var perisistentAuth auth.PersistentAuth
+ var persistentAuth auth.PersistentAuth
func promptForHost(ctx context.Context) (string, error) {
prompt := cmdio.Prompt(ctx)
prompt.Label = "Databricks Host"
prompt.Default = "https://"
prompt.AllowEdit = true
// Validate?
host, err := prompt.Run()
if err != nil {
return "", err
}
return host, nil
}
func promptForAccountID(ctx context.Context) (string, error) {
prompt := cmdio.Prompt(ctx)
prompt.Label = "Databricks Account ID"
prompt.Default = ""
prompt.AllowEdit = true
// Validate?
accountId, err := prompt.Run()
if err != nil {
return "", err
}
return accountId, nil
}
func init() {
root.RootCmd.AddCommand(authCmd)
- authCmd.PersistentFlags().StringVar(&perisistentAuth.Host, "host", perisistentAuth.Host, "Databricks Host")
- authCmd.PersistentFlags().StringVar(&perisistentAuth.AccountID, "account-id", perisistentAuth.AccountID, "Databricks Account ID")
+ authCmd.PersistentFlags().StringVar(&persistentAuth.Host, "host", persistentAuth.Host, "Databricks Host")
+ authCmd.PersistentFlags().StringVar(&persistentAuth.AccountID, "account-id", persistentAuth.AccountID, "Databricks Account ID")
}

View File

@@ -17,16 +17,46 @@ import (
var loginTimeout time.Duration
var configureCluster bool
func configureHost(ctx context.Context, args []string, argIndex int) error {
if len(args) > argIndex {
persistentAuth.Host = args[argIndex]
return nil
}
host, err := promptForHost(ctx)
if err != nil {
return err
}
persistentAuth.Host = host
return nil
}
var loginCmd = &cobra.Command{
Use: "login [HOST]",
Short: "Authenticate this machine",
RunE: func(cmd *cobra.Command, args []string) error {
- if perisistentAuth.Host == "" && len(args) == 1 {
- perisistentAuth.Host = args[0]
+ ctx := cmd.Context()
+ if persistentAuth.Host == "" {
+ configureHost(ctx, args, 0)
}
- defer perisistentAuth.Close()
- ctx, cancel := context.WithTimeout(cmd.Context(), loginTimeout)
+ defer persistentAuth.Close()

+ // We need the config without the profile before it's used to initialise new workspace client below.
+ // Otherwise it will complain about non existing profile because it was not yet saved.
+ cfg := config.Config{
+ Host: persistentAuth.Host,
+ AuthType: "databricks-cli",
+ }
+ if cfg.IsAccountClient() && persistentAuth.AccountID == "" {
+ accountId, err := promptForAccountID(ctx)
+ if err != nil {
+ return err
+ }
+ persistentAuth.AccountID = accountId
+ }
+ cfg.AccountID = persistentAuth.AccountID

+ ctx, cancel := context.WithTimeout(ctx, loginTimeout)
defer cancel()

var profileName string
@@ -36,7 +66,7 @@ var loginCmd = &cobra.Command{
} else {
prompt := cmdio.Prompt(ctx)
prompt.Label = "Databricks Profile Name"
- prompt.Default = perisistentAuth.ProfileName()
+ prompt.Default = persistentAuth.ProfileName()
prompt.AllowEdit = true
profile, err := prompt.Run()
if err != nil {
@@ -44,19 +74,11 @@ var loginCmd = &cobra.Command{
}
profileName = profile
}
- err := perisistentAuth.Challenge(ctx)
+ err := persistentAuth.Challenge(ctx)
if err != nil {
return err
}

- // We need the config without the profile before it's used to initialise new workspace client below.
- // Otherwise it will complain about non existing profile because it was not yet saved.
- cfg := config.Config{
- Host: perisistentAuth.Host,
- AccountID: perisistentAuth.AccountID,
- AuthType: "databricks-cli",
- }
if configureCluster {
w, err := databricks.NewWorkspaceClient((*databricks.Config)(&cfg))
if err != nil {

View File

@@ -15,13 +15,15 @@ var tokenCmd = &cobra.Command{
Use: "token [HOST]",
Short: "Get authentication token",
RunE: func(cmd *cobra.Command, args []string) error {
- if perisistentAuth.Host == "" && len(args) == 1 {
- perisistentAuth.Host = args[0]
+ ctx := cmd.Context()
+ if persistentAuth.Host == "" {
+ configureHost(ctx, args, 0)
}
- defer perisistentAuth.Close()
- ctx, cancel := context.WithTimeout(cmd.Context(), tokenTimeout)
+ defer persistentAuth.Close()

+ ctx, cancel := context.WithTimeout(ctx, tokenTimeout)
defer cancel()

- t, err := perisistentAuth.Load(ctx)
+ t, err := persistentAuth.Load(ctx)
if err != nil {
return err
}

View File

@@ -132,8 +132,8 @@
Short: "Copy files and directories to and from DBFS.",
Long: `Copy files to and from DBFS.

- It is required that you specify the scheme "file" for local files and
- "dbfs" for dbfs files. For example: file:/foo/bar, file:/c:/foo/bar or dbfs:/foo/bar.
+ For paths in DBFS it is required that you specify the "dbfs" scheme.
+ For example: dbfs:/foo/bar.

Recursively copying a directory will copy all files inside directory
at SOURCE_PATH to the directory at TARGET_PATH.

View File

@@ -23,6 +23,7 @@ var currentUser int
func init() {
RootCmd.PersistentFlags().StringP("profile", "p", "", "~/.databrickscfg profile")
RootCmd.RegisterFlagCompletionFunc("profile", databrickscfg.ProfileCompletion)
}

func MustAccountClient(cmd *cobra.Command, args []string) error {

View File

@@ -26,6 +26,20 @@ func getEnvironment(cmd *cobra.Command) (value string) {
return os.Getenv(envName)
}
func getProfile(cmd *cobra.Command) (value string) {
// The command line flag takes precedence.
flag := cmd.Flag("profile")
if flag != nil {
value = flag.Value.String()
if value != "" {
return
}
}
// If it's not set, use the environment variable.
return os.Getenv("DATABRICKS_CONFIG_PROFILE")
}
// loadBundle loads the bundle configuration and applies default mutators.
func loadBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, error)) (*bundle.Bundle, error) {
b, err := load()
@@ -38,6 +52,11 @@ func loadBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle,
return nil, nil
}

profile := getProfile(cmd)
if profile != "" {
b.Config.Workspace.Profile = profile
}

ctx := cmd.Context()
err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...))
if err != nil {
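getProfile gives the --profile flag precedence over the DATABRICKS_CONFIG_PROFILE environment variable. A minimal sketch of that precedence (resolveProfile is a hypothetical helper standing in for the cobra flag lookup):

```go
package main

import (
	"fmt"
	"os"
)

// resolveProfile mirrors the precedence in getProfile above: an explicit
// --profile flag value wins, otherwise DATABRICKS_CONFIG_PROFILE applies.
func resolveProfile(flagValue string) string {
	if flagValue != "" {
		return flagValue
	}
	return os.Getenv("DATABRICKS_CONFIG_PROFILE")
}

func main() {
	os.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-2")
	fmt.Println(resolveProfile("PROFILE-1")) // PROFILE-1: flag wins
	fmt.Println(resolveProfile(""))          // PROFILE-2: env fallback
}
```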

cmd/root/bundle_test.go (new file, 119 lines)
View File

@@ -0,0 +1,119 @@
package root
import (
"context"
"os"
"path/filepath"
"runtime"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/stretchr/testify/assert"
)
func setupDatabricksCfg(t *testing.T) {
tempHomeDir := t.TempDir()
homeEnvVar := "HOME"
if runtime.GOOS == "windows" {
homeEnvVar = "USERPROFILE"
}
cfg := []byte("[PROFILE-1]\nhost = https://a.com\ntoken = a\n[PROFILE-2]\nhost = https://a.com\ntoken = b\n")
err := os.WriteFile(filepath.Join(tempHomeDir, ".databrickscfg"), cfg, 0644)
assert.NoError(t, err)
t.Setenv("DATABRICKS_CONFIG_FILE", "")
t.Setenv(homeEnvVar, tempHomeDir)
}
func setup(t *testing.T, host string) *bundle.Bundle {
setupDatabricksCfg(t)
ctx := context.Background()
RootCmd.SetContext(ctx)
_, err := initializeLogger(ctx)
assert.NoError(t, err)
err = configureBundle(RootCmd, []string{"validate"}, func() (*bundle.Bundle, error) {
return &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Name: "test",
},
Workspace: config.Workspace{
Host: host,
},
},
}, nil
})
assert.NoError(t, err)
return bundle.Get(RootCmd.Context())
}
func TestBundleConfigureDefault(t *testing.T) {
b := setup(t, "https://x.com")
assert.NotPanics(t, func() {
b.WorkspaceClient()
})
}
func TestBundleConfigureWithMultipleMatches(t *testing.T) {
b := setup(t, "https://a.com")
assert.Panics(t, func() {
b.WorkspaceClient()
})
}
func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) {
RootCmd.Flag("profile").Value.Set("NOEXIST")
b := setup(t, "https://x.com")
assert.PanicsWithError(t, "no matching config profiles found", func() {
b.WorkspaceClient()
})
}
func TestBundleConfigureWithMismatchedProfile(t *testing.T) {
RootCmd.Flag("profile").Value.Set("PROFILE-1")
b := setup(t, "https://x.com")
assert.PanicsWithError(t, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com", func() {
b.WorkspaceClient()
})
}
func TestBundleConfigureWithCorrectProfile(t *testing.T) {
RootCmd.Flag("profile").Value.Set("PROFILE-1")
b := setup(t, "https://a.com")
assert.NotPanics(t, func() {
b.WorkspaceClient()
})
}
func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) {
t.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-1")
t.Cleanup(func() {
t.Setenv("DATABRICKS_CONFIG_PROFILE", "")
})
b := setup(t, "https://x.com")
assert.PanicsWithError(t, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com", func() {
b.WorkspaceClient()
})
}
func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) {
t.Setenv("DATABRICKS_CONFIG_PROFILE", "NOEXIST")
t.Cleanup(func() {
t.Setenv("DATABRICKS_CONFIG_PROFILE", "")
})
RootCmd.Flag("profile").Value.Set("PROFILE-1")
b := setup(t, "https://a.com")
assert.NotPanics(t, func() {
b.WorkspaceClient()
})
}

View File

@@ -1,6 +1,11 @@
package secrets

import (
"encoding/base64"
"fmt"
"io"
"os"

"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
@@ -40,15 +45,14 @@
and cannot exceed 128 characters. The maximum allowed secret value size is 128
KB. The maximum number of secrets in a given scope is 1000.

- The input fields "string_value" or "bytes_value" specify the type of the
- secret, which will determine the value returned when the secret value is
- requested. Exactly one must be specified.
+ The arguments "string-value" or "bytes-value" specify the type of the secret,
+ which will determine the value returned when the secret value is requested.

- Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws
- RESOURCE_LIMIT_EXCEEDED if maximum number of secrets in scope is exceeded.
- Throws INVALID_PARAMETER_VALUE if the key name or value length is invalid.
- Throws PERMISSION_DENIED if the user does not have permission to make this
- API call.`,
+ You can specify the secret value in one of three ways:
+ * Specify the value as a string using the --string-value flag.
+ * Input the secret when prompted interactively (single-line secrets).
+ * Pass the secret via standard input (multi-line secrets).
+ `,

Annotations: map[string]string{},
Args: func(cmd *cobra.Command, args []string) error {
@@ -62,6 +66,13 @@
RunE: func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)

bytesValueChanged := cmd.Flags().Changed("bytes-value")
stringValueChanged := cmd.Flags().Changed("string-value")
if bytesValueChanged && stringValueChanged {
return fmt.Errorf("cannot specify both --bytes-value and --string-value")
}

if cmd.Flags().Changed("json") {
err = putSecretJson.Unmarshal(&putSecretReq)
if err != nil {
@@ -71,12 +82,20 @@
putSecretReq.Scope = args[0]
putSecretReq.Key = args[1]

- value, err := cmdio.Secret(ctx)
- if err != nil {
- return err
+ switch {
+ case bytesValueChanged:
+ // Bytes value set; encode as base64.
+ putSecretReq.BytesValue = base64.StdEncoding.EncodeToString([]byte(putSecretReq.BytesValue))
+ case stringValueChanged:
+ // String value set; nothing to do.
+ default:
+ // Neither is specified; read secret value from stdin.
+ bytes, err := promptSecret(cmd)
+ if err != nil {
+ return err
+ }
+ putSecretReq.BytesValue = base64.StdEncoding.EncodeToString(bytes)
}
- putSecretReq.StringValue = value
}

err = w.Secrets.PutSecret(ctx, putSecretReq)
@@ -86,3 +105,17 @@
return nil
},
}
func promptSecret(cmd *cobra.Command) ([]byte, error) {
// If stdin is a TTY, prompt for the secret.
if !cmdio.IsInTTY(cmd.Context()) {
return io.ReadAll(os.Stdin)
}
value, err := cmdio.Secret(cmd.Context(), "Please enter your secret value")
if err != nil {
return nil, err
}
return []byte(value), nil
}
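
For reference, the command above ultimately reduces to a single SDK call. A minimal sketch of the equivalent direct usage, assuming workspace authentication is already configured and using placeholder scope/key names:

package main

import (
	"context"
	"encoding/base64"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/workspace"
)

func main() {
	ctx := context.Background()
	w := databricks.Must(databricks.NewWorkspaceClient())

	// String secrets go into StringValue as-is; binary secrets must be
	// base64-encoded into BytesValue, exactly as put-secret does above.
	err := w.Secrets.PutSecret(ctx, workspace.PutSecret{
		Scope:      "my-scope",
		Key:        "my-key",
		BytesValue: base64.StdEncoding.EncodeToString([]byte{0x00, 0x01, 0x02}),
	})
	if err != nil {
		panic(err)
	}
}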

go.mod
@@ -23,11 +23,11 @@ require (
 	github.com/stretchr/testify v1.8.4 // MIT
 	github.com/whilp/git-urls v1.0.0 // MIT
 	golang.org/x/exp v0.0.0-20230310171629-522b1b587ee0
-	golang.org/x/mod v0.11.0
-	golang.org/x/oauth2 v0.9.0
+	golang.org/x/mod v0.12.0
+	golang.org/x/oauth2 v0.10.0
 	golang.org/x/sync v0.3.0
-	golang.org/x/term v0.9.0
-	golang.org/x/text v0.10.0
+	golang.org/x/term v0.10.0
+	golang.org/x/text v0.11.0
 	gopkg.in/ini.v1 v1.67.0 // Apache 2.0
 )
@@ -50,9 +50,9 @@ require (
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/zclconf/go-cty v1.13.2 // indirect
 	go.opencensus.io v0.24.0 // indirect
-	golang.org/x/crypto v0.10.0 // indirect
-	golang.org/x/net v0.11.0 // indirect
-	golang.org/x/sys v0.9.0 // indirect
+	golang.org/x/crypto v0.11.0 // indirect
+	golang.org/x/net v0.12.0 // indirect
+	golang.org/x/sys v0.10.0 // indirect
 	golang.org/x/time v0.3.0 // indirect
 	google.golang.org/api v0.129.0 // indirect
 	google.golang.org/appengine v1.6.7 // indirect

go.sum
@@ -163,8 +163,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM=
-golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I=
+golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
+golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20230310171629-522b1b587ee0 h1:LGJsf5LRplCck6jUCH3dBL2dmycNruWNF5xugkSlfXw=
 golang.org/x/exp v0.0.0-20230310171629-522b1b587ee0/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
@@ -172,8 +172,8 @@ golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTk
 golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
 golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU=
-golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
+golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -187,12 +187,12 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY
 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU=
-golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ=
+golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
+golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.9.0 h1:BPpt2kU7oMRq3kCHAA1tbSEshXRw1LpG2ztgDwrzuAs=
-golang.org/x/oauth2 v0.9.0/go.mod h1:qYgFZaFiu6Wg24azG8bdV52QJXJGbZzIIsRCdVKzbLw=
+golang.org/x/oauth2 v0.10.0 h1:zHCpF2Khkwy4mMB4bv0U37YtJdTGW8jI0glAApi0Kh8=
+golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -217,20 +217,20 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
-golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
+golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.9.0 h1:GRRCnKYhdQrD8kfRAdQ6Zcw1P0OcELxGLKJvtjVMZ28=
-golang.org/x/term v0.9.0/go.mod h1:M6DEAAIenWoTxdKrOltXcmDY3rSplQUkrvaDU5FcQyo=
+golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
+golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
-golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
-golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
+golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
 golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
 golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

internal/acc/debug.go (new file)
@@ -0,0 +1,42 @@
package acc

import (
	"encoding/json"
	"os"
	"path"
	"path/filepath"
	"testing"
)

// Detects if test is run from "debug test" feature in VS Code.
func isInDebug() bool {
	ex, _ := os.Executable()
	return path.Base(ex) == "__debug_bin"
}

// Loads debug environment from ~/.databricks/debug-env.json.
func loadDebugEnvIfRunFromIDE(t *testing.T, key string) {
	if !isInDebug() {
		return
	}
	home, err := os.UserHomeDir()
	if err != nil {
		t.Fatalf("cannot find user home: %s", err)
	}
	raw, err := os.ReadFile(filepath.Join(home, ".databricks/debug-env.json"))
	if err != nil {
		t.Fatalf("cannot load ~/.databricks/debug-env.json: %s", err)
	}
	var conf map[string]map[string]string
	err = json.Unmarshal(raw, &conf)
	if err != nil {
		t.Fatalf("cannot parse ~/.databricks/debug-env.json: %s", err)
	}
	vars, ok := conf[key]
	if !ok {
		t.Fatalf("~/.databricks/debug-env.json#%s not configured", key)
	}
	for k, v := range vars {
		os.Setenv(k, v)
	}
}
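
The code above implies the following shape for ~/.databricks/debug-env.json: one top-level key per test suite (here "workspace"), mapping to environment variables to inject. The host and token values below are purely illustrative:

{
  "workspace": {
    "DATABRICKS_HOST": "https://my-workspace.cloud.databricks.com",
    "DATABRICKS_TOKEN": "..."
  }
}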

internal/acc/helpers.go (new file)
@@ -0,0 +1,35 @@
package acc

import (
	"fmt"
	"math/rand"
	"os"
	"strings"
	"testing"
	"time"
)

// GetEnvOrSkipTest proceeds with test only with that env variable.
func GetEnvOrSkipTest(t *testing.T, name string) string {
	value := os.Getenv(name)
	if value == "" {
		t.Skipf("Environment variable %s is missing", name)
	}
	return value
}

const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

// RandomName gives random name with optional prefix. e.g. acc.RandomName("tf-")
func RandomName(prefix ...string) string {
	rand.Seed(time.Now().UnixNano())
	randLen := 12
	b := make([]byte, randLen)
	for i := range b {
		// Sample from the full charset, not just its first randLen characters.
		b[i] = charset[rand.Intn(len(charset))]
	}
	if len(prefix) > 0 {
		return fmt.Sprintf("%s%s", strings.Join(prefix, ""), b)
	}
	return string(b)
}

internal/acc/workspace.go (new file)
@@ -0,0 +1,68 @@
package acc

import (
	"context"
	"testing"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/stretchr/testify/require"
)

type WorkspaceT struct {
	*testing.T

	W *databricks.WorkspaceClient

	ctx  context.Context
	exec *compute.CommandExecutorV2
}

func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
	loadDebugEnvIfRunFromIDE(t, "workspace")

	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))

	w, err := databricks.NewWorkspaceClient()
	require.NoError(t, err)

	wt := &WorkspaceT{
		T: t,

		W: w,

		ctx: context.Background(),
	}

	return wt.ctx, wt
}

func (t *WorkspaceT) TestClusterID() string {
	clusterID := GetEnvOrSkipTest(t.T, "TEST_BRICKS_CLUSTER_ID")
	err := t.W.Clusters.EnsureClusterIsRunning(t.ctx, clusterID)
	require.NoError(t, err)
	return clusterID
}

func (t *WorkspaceT) RunPython(code string) (string, error) {
	var err error

	// Create command executor only once per test.
	if t.exec == nil {
		t.exec, err = t.W.CommandExecution.Start(t.ctx, t.TestClusterID(), compute.LanguagePython)
		require.NoError(t, err)

		t.Cleanup(func() {
			err := t.exec.Destroy(t.ctx)
			require.NoError(t, err)
		})
	}

	results, err := t.exec.Execute(t.ctx, code)
	require.NoError(t, err)
	require.NotEqual(t, compute.ResultTypeError, results.ResultType, results.Cause)
	output, ok := results.Data.(string)
	require.True(t, ok, "unexpected type %T", results.Data)
	return output, nil
}
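
Put together, these helpers keep acceptance tests compact. A hypothetical test using them (it skips unless CLOUD_ENV is set and runs Python on the cluster from TEST_BRICKS_CLUSTER_ID; the test name is illustrative):

package internal

import (
	"testing"

	"github.com/databricks/cli/internal/acc"
	"github.com/stretchr/testify/require"
)

func TestRunPythonSmoke(t *testing.T) {
	_, wt := acc.WorkspaceTest(t)

	// Executes on the test cluster and returns the command output.
	out, err := wt.RunPython(`print("hello")`)
	require.NoError(t, err)
	require.Contains(t, out, "hello")
}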

@@ -1,12 +1,98 @@
 package internal
 
 import (
+	"context"
+	"encoding/base64"
+	"fmt"
 	"testing"
 
+	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/databricks-sdk-go/service/workspace"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )
 
 func TestSecretsCreateScopeErrWhenNoArguments(t *testing.T) {
 	_, _, err := RequireErrorRun(t, "secrets", "create-scope")
 	assert.Equal(t, "accepts 1 arg(s), received 0", err.Error())
 }
+
+func temporarySecretScope(ctx context.Context, t *acc.WorkspaceT) string {
+	scope := acc.RandomName("cli-acc-")
+	err := t.W.Secrets.CreateScope(ctx, workspace.CreateScope{
+		Scope: scope,
+	})
+	require.NoError(t, err)
+
+	// Delete the scope after the test.
+	t.Cleanup(func() {
+		err := t.W.Secrets.DeleteScopeByScope(ctx, scope)
+		require.NoError(t, err)
+	})
+
+	return scope
+}
+
+func assertSecretStringValue(t *acc.WorkspaceT, scope, key, expected string) {
+	out, err := t.RunPython(fmt.Sprintf(`
+import base64
+value = dbutils.secrets.get(scope="%s", key="%s")
+encoded_value = base64.b64encode(value.encode('utf-8'))
+print(encoded_value.decode('utf-8'))
+`, scope, key))
+	require.NoError(t, err)
+
+	decoded, err := base64.StdEncoding.DecodeString(out)
+	require.NoError(t, err)
+	assert.Equal(t, expected, string(decoded))
+}
+
+func assertSecretBytesValue(t *acc.WorkspaceT, scope, key string, expected []byte) {
+	out, err := t.RunPython(fmt.Sprintf(`
+import base64
+value = dbutils.secrets.getBytes(scope="%s", key="%s")
+encoded_value = base64.b64encode(value)
+print(encoded_value.decode('utf-8'))
+`, scope, key))
+	require.NoError(t, err)
+
+	decoded, err := base64.StdEncoding.DecodeString(out)
+	require.NoError(t, err)
+	assert.Equal(t, expected, decoded)
+}
+
+func TestSecretsPutSecretStringValue(tt *testing.T) {
+	ctx, t := acc.WorkspaceTest(tt)
+	scope := temporarySecretScope(ctx, t)
+	key := "test-key"
+	value := "test-value\nwith-newlines\n"
+
+	stdout, stderr := RequireSuccessfulRun(t.T, "secrets", "put-secret", scope, key, "--string-value", value)
+	assert.Empty(t, stdout)
+	assert.Empty(t, stderr)
+
+	assertSecretStringValue(t, scope, key, value)
+	assertSecretBytesValue(t, scope, key, []byte(value))
+}
+
+func TestSecretsPutSecretBytesValue(tt *testing.T) {
+	ctx, t := acc.WorkspaceTest(tt)
+
+	if true {
+		// Change the condition above to false to run this test in isolation.
+		// To be addressed once none of the commands taint global state.
+		t.Skip("skipping because the test above clobbers global state")
+	}
+
+	scope := temporarySecretScope(ctx, t)
+	key := "test-key"
+	value := []byte{0x00, 0x01, 0x02, 0x03}
+
+	stdout, stderr := RequireSuccessfulRun(t.T, "secrets", "put-secret", scope, key, "--bytes-value", string(value))
+	assert.Empty(t, stdout)
+	assert.Empty(t, stderr)
+
+	// Note: this value cannot be represented as a Python string,
+	// so we only check equality through the dbutils.secrets.getBytes API.
+	assertSecretBytesValue(t, scope, key, value)
+}

@@ -174,18 +174,19 @@ func Select[V any](ctx context.Context, names map[string]V, label string) (id st
 	return c.Select(stringNames, label)
 }
 
-func (c *cmdIO) Secret() (value string, err error) {
+func (c *cmdIO) Secret(label string) (value string, err error) {
 	prompt := (promptui.Prompt{
-		Label: "Enter your secrets value",
+		Label: label,
 		Mask:  '*',
+		HideEntered: true,
 	})
 
 	return prompt.Run()
 }
 
-func Secret(ctx context.Context) (value string, err error) {
+func Secret(ctx context.Context, label string) (value string, err error) {
 	c := fromContext(ctx)
-	return c.Secret()
+	return c.Secret(label)
 }
 
 type nopWriteCloser struct {

@@ -90,7 +90,7 @@ func (l profileFromHostLoader) Configure(cfg *config.Config) error {
 	}
 	if err, ok := err.(errMultipleProfiles); ok {
 		return fmt.Errorf(
-			"%s: %w: please set DATABRICKS_CONFIG_PROFILE to specify one",
+			"%s: %w: please set DATABRICKS_CONFIG_PROFILE or provide --profile flag to specify one",
 			host, err)
 	}
 	if err != nil {

@@ -7,6 +7,7 @@ import (
 	"strings"
 
 	"github.com/databricks/cli/libs/log"
+	"github.com/databricks/databricks-sdk-go"
 	"github.com/databricks/databricks-sdk-go/config"
 	"gopkg.in/ini.v1"
 )
@@ -129,6 +130,29 @@ func SaveToProfile(ctx context.Context, cfg *config.Config) error {
 	return configFile.SaveTo(configFile.Path())
 }
 
+func ValidateConfigAndProfileHost(cfg *databricks.Config, profile string) error {
+	configFile, err := config.LoadFile(cfg.ConfigFile)
+	if err != nil {
+		return fmt.Errorf("cannot parse config file: %w", err)
+	}
+
+	// Normalized version of the configured host.
+	host := normalizeHost(cfg.Host)
+	match, err := findMatchingProfile(configFile, func(s *ini.Section) bool {
+		return profile == s.Name()
+	})
+	if err != nil {
+		return err
+	}
+
+	hostFromProfile := normalizeHost(match.Key("host").Value())
+	if hostFromProfile != "" && host != "" && hostFromProfile != host {
+		return fmt.Errorf("config host mismatch: profile uses host %s, but CLI configured to use %s", hostFromProfile, host)
+	}
+
+	return nil
+}
+
 func init() {
 	// We document databrickscfg files with a [DEFAULT] header and wish to keep it that way.
 	// This, however, does mean we emit a [DEFAULT] section even if it's empty.
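
A hypothetical caller of this validation, e.g. a command that accepts a --profile flag and wants to fail fast on a conflicting host. The helper name and the libs/databrickscfg import path are assumptions for illustration:

package main

import (
	"github.com/databricks/cli/libs/databrickscfg"
	"github.com/databricks/databricks-sdk-go"
)

// applyProfile rejects a --profile value whose host conflicts with the
// host the CLI is already configured to use, then selects the profile.
func applyProfile(cfg *databricks.Config, profile string) error {
	if profile == "" {
		return nil
	}
	if err := databrickscfg.ValidateConfigAndProfileHost(cfg, profile); err != nil {
		return err
	}
	cfg.Profile = profile
	return nil
}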

@@ -5,6 +5,7 @@ import (
 	"strings"
 
 	"github.com/databricks/databricks-sdk-go/config"
+	"github.com/spf13/cobra"
 )
 
 // Profile holds a subset of the keys in a databrickscfg profile.
@@ -59,6 +60,10 @@ func MatchAccountProfiles(p Profile) bool {
 	return p.Host != "" && p.AccountID != ""
 }
 
+func MatchAllProfiles(p Profile) bool {
+	return true
+}
+
 const DefaultPath = "~/.databrickscfg"
 
 func LoadProfiles(path string, fn ProfileMatchFunction) (file string, profiles Profiles, err error) {
@@ -99,3 +104,11 @@ func LoadProfiles(path string, fn ProfileMatchFunction) (file string, profiles P
 	return
 }
+
+func ProfileCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+	_, profiles, err := LoadProfiles(DefaultPath, MatchAllProfiles)
+	if err != nil {
+		return nil, cobra.ShellCompDirectiveError
+	}
+	return profiles.Names(), cobra.ShellCompDirectiveNoFileComp
+}
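
One plausible wiring for this completion function, attaching it to a --profile flag on a cobra command; the command itself is illustrative:

package main

import (
	"github.com/databricks/cli/libs/databrickscfg"
	"github.com/spf13/cobra"
)

func newExampleCmd() *cobra.Command {
	cmd := &cobra.Command{Use: "example"}
	cmd.PersistentFlags().String("profile", "", "~/.databrickscfg profile to use")

	// Complete --profile values with the profile names from ~/.databrickscfg.
	_ = cmd.RegisterFlagCompletionFunc("profile", databrickscfg.ProfileCompletion)
	return cmd
}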

@@ -13,42 +13,6 @@ import (
 	"github.com/databricks/databricks-sdk-go/service/workspace"
 )
 
-// Return if the child path is nested under the parent path.
-func isPathNestedUnder(child, parent string) bool {
-	child = path.Clean(child)
-	parent = path.Clean(parent)
-
-	// Traverse up the tree as long as "child" is contained in "parent".
-	for len(child) > len(parent) && strings.HasPrefix(child, parent) {
-		child = path.Dir(child)
-		if child == parent {
-			return true
-		}
-	}
-	return false
-}
-
-// Check if the specified path is nested under one of the allowed base paths.
-func checkPathNestedUnderBasePaths(me *iam.User, p string) error {
-	validBasePaths := []string{
-		path.Clean(fmt.Sprintf("/Users/%s", me.UserName)),
-		path.Clean(fmt.Sprintf("/Repos/%s", me.UserName)),
-	}
-
-	givenBasePath := path.Clean(p)
-	match := false
-	for _, basePath := range validBasePaths {
-		if isPathNestedUnder(givenBasePath, basePath) {
-			match = true
-			break
-		}
-	}
-	if !match {
-		return fmt.Errorf("path must be nested under %s", strings.Join(validBasePaths, " or "))
-	}
-	return nil
-}
-
 func repoPathForPath(me *iam.User, remotePath string) string {
 	base := path.Clean(fmt.Sprintf("/Repos/%s", me.UserName))
 	remotePath = path.Clean(remotePath)
@@ -66,11 +30,6 @@ func EnsureRemotePathIsUsable(ctx context.Context, wsc *databricks.WorkspaceClie
 		return err
 	}
 
-	err = checkPathNestedUnderBasePaths(me, remotePath)
-	if err != nil {
-		return err
-	}
-
 	// Ensure that the remote path exists.
 	// If it is a repo, it has to exist.
 	// If it is a workspace path, it may not exist.

@@ -7,37 +7,6 @@ import (
 	"github.com/stretchr/testify/assert"
 )
 
-func TestPathNestedUnderBasePaths(t *testing.T) {
-	me := iam.User{
-		UserName: "jane@doe.com",
-	}
-
-	// Not nested under allowed base paths.
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/."))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/.."))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/john@doe.com"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.comsuffix/foo"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/."))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/.."))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/john@doe.com"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.comsuffix/foo"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users"))
-	assert.Error(t, checkPathNestedUnderBasePaths(&me, "/"))
-
-	// Nested under allowed base paths.
-	assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/foo"))
-	assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/./foo"))
-	assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/foo/bar/qux"))
-	assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/foo"))
-	assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/./foo"))
-	assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/foo/bar/qux"))
-}
-
 func TestPathToRepoPath(t *testing.T) {
 	me := iam.User{
 		UserName: "jane@doe.com",