Move the CLI test runner to `internal/testcli` package (#2004)

## Changes

The CLI test runner instantiates a new CLI "instance" through
`cmd.New()` and runs it with specified arguments. This is as close as we
get to running the real CLI **in-process**. This runner was located in
the `internal` package next to other helpers. This change moves it to
its own dedicated package.

Note: this runner transitively imports pretty much the entire
repository, which is why we intentionally keep it _separate_ from
`testutil`.

## Tests

n/a
This commit is contained in:
Pieter Noordhuis 2024-12-12 17:48:51 +01:00 committed by GitHub
parent dd3b7ec450
commit e472b5d888
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
34 changed files with 500 additions and 462 deletions

View File

@ -1,22 +1,22 @@
package config_tests package config_tests
import ( import (
"path/filepath" "context"
"testing" "testing"
"github.com/databricks/cli/cmd/root" "github.com/databricks/cli/bundle"
assert "github.com/databricks/cli/libs/dyn/dynassert" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/require"
"github.com/databricks/cli/internal"
) )
func TestSuggestTargetIfWrongPassed(t *testing.T) { func TestSuggestTargetIfWrongPassed(t *testing.T) {
t.Setenv("BUNDLE_ROOT", filepath.Join("target_overrides", "workspace")) b := load(t, "target_overrides/workspace")
stdoutBytes, _, err := internal.RequireErrorRun(t, "bundle", "validate", "-e", "incorrect")
stdout := stdoutBytes.String()
assert.Error(t, root.ErrAlreadyPrinted, err) ctx := context.Background()
assert.Contains(t, stdout, "Available targets:") diags := bundle.Apply(ctx, b, mutator.SelectTarget("incorrect"))
assert.Contains(t, stdout, "development") err := diags.Error()
assert.Contains(t, stdout, "staging") require.Error(t, err)
require.Contains(t, err.Error(), "Available targets:")
require.Contains(t, err.Error(), "development")
require.Contains(t, err.Error(), "staging")
} }

View File

@ -4,14 +4,14 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
) )
func TestListsInstalledProjects(t *testing.T) { func TestListsInstalledProjects(t *testing.T) {
ctx := context.Background() ctx := context.Background()
ctx = env.WithUserHomeDir(ctx, "project/testdata/installed-in-home") ctx = env.WithUserHomeDir(ctx, "project/testdata/installed-in-home")
r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "installed") r := testcli.NewRunnerWithContext(t, ctx, "labs", "installed")
r.RunAndExpectOutput(` r.RunAndExpectOutput(`
Name Description Version Name Description Version
blueprint Blueprint Project v0.3.15 blueprint Blueprint Project v0.3.15

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -12,7 +12,7 @@ import (
func TestListingWorks(t *testing.T) { func TestListingWorks(t *testing.T) {
ctx := context.Background() ctx := context.Background()
ctx = env.WithUserHomeDir(ctx, "project/testdata/installed-in-home") ctx = env.WithUserHomeDir(ctx, "project/testdata/installed-in-home")
c := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "list") c := testcli.NewRunnerWithContext(t, ctx, "labs", "list")
stdout, _, err := c.Run() stdout, _, err := c.Run()
require.NoError(t, err) require.NoError(t, err)
require.Contains(t, stdout.String(), "ucx") require.Contains(t, stdout.String(), "ucx")

View File

@ -6,7 +6,7 @@ import (
"testing" "testing"
"time" "time"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
"github.com/databricks/cli/libs/python" "github.com/databricks/cli/libs/python"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@ -30,7 +30,7 @@ func devEnvContext(t *testing.T) context.Context {
func TestRunningBlueprintEcho(t *testing.T) { func TestRunningBlueprintEcho(t *testing.T) {
ctx := devEnvContext(t) ctx := devEnvContext(t)
r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "blueprint", "echo") r := testcli.NewRunnerWithContext(t, ctx, "labs", "blueprint", "echo")
var out echoOut var out echoOut
r.RunAndParseJSON(&out) r.RunAndParseJSON(&out)
assert.Equal(t, "echo", out.Command) assert.Equal(t, "echo", out.Command)
@ -41,14 +41,14 @@ func TestRunningBlueprintEcho(t *testing.T) {
func TestRunningBlueprintEchoProfileWrongOverride(t *testing.T) { func TestRunningBlueprintEchoProfileWrongOverride(t *testing.T) {
ctx := devEnvContext(t) ctx := devEnvContext(t)
r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "blueprint", "echo", "--profile", "workspace-profile") r := testcli.NewRunnerWithContext(t, ctx, "labs", "blueprint", "echo", "--profile", "workspace-profile")
_, _, err := r.Run() _, _, err := r.Run()
assert.ErrorIs(t, err, databricks.ErrNotAccountClient) assert.ErrorIs(t, err, databricks.ErrNotAccountClient)
} }
func TestRunningCommand(t *testing.T) { func TestRunningCommand(t *testing.T) {
ctx := devEnvContext(t) ctx := devEnvContext(t)
r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "blueprint", "foo") r := testcli.NewRunnerWithContext(t, ctx, "labs", "blueprint", "foo")
r.WithStdin() r.WithStdin()
defer r.CloseStdin() defer r.CloseStdin()
@ -60,7 +60,7 @@ func TestRunningCommand(t *testing.T) {
func TestRenderingTable(t *testing.T) { func TestRenderingTable(t *testing.T) {
ctx := devEnvContext(t) ctx := devEnvContext(t)
r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "blueprint", "table") r := testcli.NewRunnerWithContext(t, ctx, "labs", "blueprint", "table")
r.RunAndExpectOutput(` r.RunAndExpectOutput(`
Key Value Key Value
First Second First Second

View File

@ -19,7 +19,7 @@ import (
"github.com/databricks/cli/cmd/labs/github" "github.com/databricks/cli/cmd/labs/github"
"github.com/databricks/cli/cmd/labs/project" "github.com/databricks/cli/cmd/labs/project"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
"github.com/databricks/cli/libs/process" "github.com/databricks/cli/libs/process"
"github.com/databricks/cli/libs/python" "github.com/databricks/cli/libs/python"
@ -236,7 +236,7 @@ func TestInstallerWorksForReleases(t *testing.T) {
// │ │ │ └── site-packages // │ │ │ └── site-packages
// │ │ │ ├── ... // │ │ │ ├── ...
// │ │ │ ├── distutils-precedence.pth // │ │ │ ├── distutils-precedence.pth
r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "install", "blueprint", "--debug") r := testcli.NewRunnerWithContext(t, ctx, "labs", "install", "blueprint", "--debug")
r.RunAndExpectOutput("setting up important infrastructure") r.RunAndExpectOutput("setting up important infrastructure")
} }
@ -356,7 +356,7 @@ account_id = abc
// └── databrickslabs-blueprint-releases.json // └── databrickslabs-blueprint-releases.json
// `databricks labs install .` means "verify this installer i'm developing does work" // `databricks labs install .` means "verify this installer i'm developing does work"
r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "install", ".") r := testcli.NewRunnerWithContext(t, ctx, "labs", "install", ".")
r.WithStdin() r.WithStdin()
defer r.CloseStdin() defer r.CloseStdin()
@ -426,7 +426,7 @@ func TestUpgraderWorksForReleases(t *testing.T) {
ctx = env.Set(ctx, "DATABRICKS_CLUSTER_ID", "installer-cluster") ctx = env.Set(ctx, "DATABRICKS_CLUSTER_ID", "installer-cluster")
ctx = env.Set(ctx, "DATABRICKS_WAREHOUSE_ID", "installer-warehouse") ctx = env.Set(ctx, "DATABRICKS_WAREHOUSE_ID", "installer-warehouse")
r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "upgrade", "blueprint") r := testcli.NewRunnerWithContext(t, ctx, "labs", "upgrade", "blueprint")
r.RunAndExpectOutput("setting up important infrastructure") r.RunAndExpectOutput("setting up important infrastructure")
// Check if the stub was called with the 'python -m pip install' command // Check if the stub was called with the 'python -m pip install' command

View File

@ -3,6 +3,7 @@ package internal
import ( import (
"testing" "testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -10,6 +11,6 @@ import (
func TestAccAlertsCreateErrWhenNoArguments(t *testing.T) { func TestAccAlertsCreateErrWhenNoArguments(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "alerts-legacy", "create") _, _, err := testcli.RequireErrorRun(t, "alerts-legacy", "create")
assert.Equal(t, "please provide command input in JSON format by specifying the --json flag", err.Error()) assert.Equal(t, "please provide command input in JSON format by specifying the --json flag", err.Error())
} }

View File

@ -11,13 +11,14 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
_ "github.com/databricks/cli/cmd/api" _ "github.com/databricks/cli/cmd/api"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
) )
func TestAccApiGet(t *testing.T) { func TestAccApiGet(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me") stdout, _ := testcli.RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me")
// Deserialize SCIM API response. // Deserialize SCIM API response.
var out map[string]any var out map[string]any
@ -44,11 +45,11 @@ func TestAccApiPost(t *testing.T) {
// Post to mkdir // Post to mkdir
{ {
RequireSuccessfulRun(t, "api", "post", "--json=@"+requestPath, "/api/2.0/dbfs/mkdirs") testcli.RequireSuccessfulRun(t, "api", "post", "--json=@"+requestPath, "/api/2.0/dbfs/mkdirs")
} }
// Post to delete // Post to delete
{ {
RequireSuccessfulRun(t, "api", "post", "--json=@"+requestPath, "/api/2.0/dbfs/delete") testcli.RequireSuccessfulRun(t, "api", "post", "--json=@"+requestPath, "/api/2.0/dbfs/delete")
} }
} }

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"testing" "testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -13,7 +14,7 @@ import (
func TestAuthDescribeSuccess(t *testing.T) { func TestAuthDescribeSuccess(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := RequireSuccessfulRun(t, "auth", "describe") stdout, _ := testcli.RequireSuccessfulRun(t, "auth", "describe")
outStr := stdout.String() outStr := stdout.String()
w, err := databricks.NewWorkspaceClient(&databricks.Config{}) w, err := databricks.NewWorkspaceClient(&databricks.Config{})
@ -34,7 +35,7 @@ func TestAuthDescribeSuccess(t *testing.T) {
func TestAuthDescribeFailure(t *testing.T) { func TestAuthDescribeFailure(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := RequireSuccessfulRun(t, "auth", "describe", "--profile", "nonexistent") stdout, _ := testcli.RequireSuccessfulRun(t, "auth", "describe", "--profile", "nonexistent")
outStr := stdout.String() outStr := stdout.String()
require.NotEmpty(t, outStr) require.NotEmpty(t, outStr)

View File

@ -14,6 +14,7 @@ import (
"github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
@ -257,7 +258,7 @@ func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy") stdout, stderr, err := testcli.RequireErrorRun(t, "bundle", "deploy")
assert.Error(t, err) assert.Error(t, err)
assert.Equal(t, fmt.Sprintf(`Error: volume /Volumes/main/%s/doesnotexist does not exist: Not Found assert.Equal(t, fmt.Sprintf(`Error: volume /Volumes/main/%s/doesnotexist does not exist: Not Found
@ -294,7 +295,7 @@ func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy") stdout, stderr, err := testcli.RequireErrorRun(t, "bundle", "deploy")
assert.Error(t, err) assert.Error(t, err)
assert.Equal(t, fmt.Sprintf(`Error: volume /Volumes/main/%s/my_volume does not exist: Not Found assert.Equal(t, fmt.Sprintf(`Error: volume /Volumes/main/%s/my_volume does not exist: Not Found

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
@ -39,7 +40,7 @@ func TestAccBindJobToExistingJob(t *testing.T) {
}) })
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
c := internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId), "--auto-approve") c := testcli.NewRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId), "--auto-approve")
_, _, err = c.Run() _, _, err = c.Run()
require.NoError(t, err) require.NoError(t, err)
@ -61,7 +62,7 @@ func TestAccBindJobToExistingJob(t *testing.T) {
require.Equal(t, job.Settings.Name, fmt.Sprintf("test-job-basic-%s", uniqueId)) require.Equal(t, job.Settings.Name, fmt.Sprintf("test-job-basic-%s", uniqueId))
require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py") require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py")
c = internal.NewCobraTestRunner(t, "bundle", "deployment", "unbind", "foo") c = testcli.NewRunner(t, "bundle", "deployment", "unbind", "foo")
_, _, err = c.Run() _, _, err = c.Run()
require.NoError(t, err) require.NoError(t, err)
@ -107,7 +108,7 @@ func TestAccAbortBind(t *testing.T) {
// Bind should fail because prompting is not possible. // Bind should fail because prompting is not possible.
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
t.Setenv("TERM", "dumb") t.Setenv("TERM", "dumb")
c := internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId)) c := testcli.NewRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId))
// Expect error suggesting to use --auto-approve // Expect error suggesting to use --auto-approve
_, _, err = c.Run() _, _, err = c.Run()
@ -157,7 +158,7 @@ func TestAccGenerateAndBind(t *testing.T) {
}) })
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "generate", "job", c := testcli.NewRunnerWithContext(t, ctx, "bundle", "generate", "job",
"--key", "test_job_key", "--key", "test_job_key",
"--existing-job-id", fmt.Sprint(jobId), "--existing-job-id", fmt.Sprint(jobId),
"--config-dir", filepath.Join(bundleRoot, "resources"), "--config-dir", filepath.Join(bundleRoot, "resources"),
@ -173,7 +174,7 @@ func TestAccGenerateAndBind(t *testing.T) {
require.Len(t, matches, 1) require.Len(t, matches, 1)
c = internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "test_job_key", fmt.Sprint(jobId), "--auto-approve") c = testcli.NewRunner(t, "bundle", "deployment", "bind", "test_job_key", fmt.Sprint(jobId), "--auto-approve")
_, _, err = c.Run() _, _, err = c.Run()
require.NoError(t, err) require.NoError(t, err)

View File

@ -13,6 +13,7 @@ import (
"github.com/databricks/cli/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/apierr"
@ -120,7 +121,7 @@ func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
// Redeploy the bundle // Redeploy the bundle
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
t.Setenv("TERM", "dumb") t.Setenv("TERM", "dumb")
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock") c := testcli.NewRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock")
stdout, stderr, err := c.Run() stdout, stderr, err := c.Run()
assert.EqualError(t, err, root.ErrAlreadyPrinted.Error()) assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
@ -164,7 +165,7 @@ func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
// Redeploy the bundle. Expect it to fail because deleting the pipeline requires --auto-approve. // Redeploy the bundle. Expect it to fail because deleting the pipeline requires --auto-approve.
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
t.Setenv("TERM", "dumb") t.Setenv("TERM", "dumb")
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock") c := testcli.NewRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock")
stdout, stderr, err := c.Run() stdout, stderr, err := c.Run()
assert.EqualError(t, err, root.ErrAlreadyPrinted.Error()) assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
@ -203,7 +204,7 @@ func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
// Redeploy the bundle, pointing the DLT pipeline to a different UC catalog. // Redeploy the bundle, pointing the DLT pipeline to a different UC catalog.
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
t.Setenv("TERM", "dumb") t.Setenv("TERM", "dumb")
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--var=\"catalog=whatever\"") c := testcli.NewRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--var=\"catalog=whatever\"")
stdout, stderr, err := c.Run() stdout, stderr, err := c.Run()
assert.EqualError(t, err, root.ErrAlreadyPrinted.Error()) assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
@ -284,7 +285,7 @@ func TestAccDeployUcVolume(t *testing.T) {
// Recreation of the volume without --auto-approve should fail since prompting is not possible // Recreation of the volume without --auto-approve should fail since prompting is not possible
t.Setenv("TERM", "dumb") t.Setenv("TERM", "dumb")
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
stdout, stderr, err := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}").Run() stdout, stderr, err := testcli.NewRunnerWithContext(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}").Run()
assert.Error(t, err) assert.Error(t, err)
assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following volumes. assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following volumes.
For managed volumes, the files stored in the volume are also deleted from your For managed volumes, the files stored in the volume are also deleted from your
@ -296,7 +297,7 @@ is removed from the catalog, but the underlying files are not deleted:
// Successfully recreate the volume with --auto-approve // Successfully recreate the volume with --auto-approve
t.Setenv("TERM", "dumb") t.Setenv("TERM", "dumb")
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
_, _, err = internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}", "--auto-approve").Run() _, _, err = testcli.NewRunnerWithContext(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}", "--auto-approve").Run()
assert.NoError(t, err) assert.NoError(t, err)
// Assert the volume is updated successfully // Assert the volume is updated successfully

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@ -36,7 +37,7 @@ func TestAccGenerateFromExistingJobAndDeploy(t *testing.T) {
}) })
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "generate", "job", c := testcli.NewRunnerWithContext(t, ctx, "bundle", "generate", "job",
"--existing-job-id", fmt.Sprint(jobId), "--existing-job-id", fmt.Sprint(jobId),
"--config-dir", filepath.Join(bundleRoot, "resources"), "--config-dir", filepath.Join(bundleRoot, "resources"),
"--source-dir", filepath.Join(bundleRoot, "src")) "--source-dir", filepath.Join(bundleRoot, "src"))

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@ -35,7 +36,7 @@ func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) {
}) })
t.Setenv("BUNDLE_ROOT", bundleRoot) t.Setenv("BUNDLE_ROOT", bundleRoot)
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "generate", "pipeline", c := testcli.NewRunnerWithContext(t, ctx, "bundle", "generate", "pipeline",
"--existing-pipeline-id", fmt.Sprint(pipelineId), "--existing-pipeline-id", fmt.Sprint(pipelineId),
"--config-dir", filepath.Join(bundleRoot, "resources"), "--config-dir", filepath.Join(bundleRoot, "resources"),
"--source-dir", filepath.Join(bundleRoot, "src")) "--source-dir", filepath.Join(bundleRoot, "src"))

View File

@ -12,7 +12,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
@ -65,7 +65,7 @@ func writeConfigFile(t testutil.TestingT, config map[string]any) (string, error)
func validateBundle(t testutil.TestingT, ctx context.Context, path string) ([]byte, error) { func validateBundle(t testutil.TestingT, ctx context.Context, path string) ([]byte, error) {
ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = env.Set(ctx, "BUNDLE_ROOT", path)
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "validate", "--output", "json") c := testcli.NewRunnerWithContext(t, ctx, "bundle", "validate", "--output", "json")
stdout, _, err := c.Run() stdout, _, err := c.Run()
return stdout.Bytes(), err return stdout.Bytes(), err
} }
@ -85,7 +85,7 @@ func unmarshalConfig(t testutil.TestingT, data []byte) *bundle.Bundle {
func deployBundle(t testutil.TestingT, ctx context.Context, path string) error { func deployBundle(t testutil.TestingT, ctx context.Context, path string) error {
ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = env.Set(ctx, "BUNDLE_ROOT", path)
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--auto-approve") c := testcli.NewRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--auto-approve")
_, _, err := c.Run() _, _, err := c.Run()
return err return err
} }
@ -93,7 +93,7 @@ func deployBundle(t testutil.TestingT, ctx context.Context, path string) error {
func deployBundleWithArgs(t testutil.TestingT, ctx context.Context, path string, args ...string) (string, string, error) { func deployBundleWithArgs(t testutil.TestingT, ctx context.Context, path string, args ...string) (string, string, error) {
ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = env.Set(ctx, "BUNDLE_ROOT", path)
args = append([]string{"bundle", "deploy"}, args...) args = append([]string{"bundle", "deploy"}, args...)
c := internal.NewCobraTestRunnerWithContext(t, ctx, args...) c := testcli.NewRunnerWithContext(t, ctx, args...)
stdout, stderr, err := c.Run() stdout, stderr, err := c.Run()
return stdout.String(), stderr.String(), err return stdout.String(), stderr.String(), err
} }
@ -102,7 +102,7 @@ func deployBundleWithFlags(t testutil.TestingT, ctx context.Context, path string
ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = env.Set(ctx, "BUNDLE_ROOT", path)
args := []string{"bundle", "deploy", "--force-lock"} args := []string{"bundle", "deploy", "--force-lock"}
args = append(args, flags...) args = append(args, flags...)
c := internal.NewCobraTestRunnerWithContext(t, ctx, args...) c := testcli.NewRunnerWithContext(t, ctx, args...)
_, _, err := c.Run() _, _, err := c.Run()
return err return err
} }
@ -111,7 +111,7 @@ func runResource(t testutil.TestingT, ctx context.Context, path, key string) (st
ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = env.Set(ctx, "BUNDLE_ROOT", path)
ctx = cmdio.NewContext(ctx, cmdio.Default()) ctx = cmdio.NewContext(ctx, cmdio.Default())
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "run", key) c := testcli.NewRunnerWithContext(t, ctx, "bundle", "run", key)
stdout, _, err := c.Run() stdout, _, err := c.Run()
return stdout.String(), err return stdout.String(), err
} }
@ -123,14 +123,14 @@ func runResourceWithParams(t testutil.TestingT, ctx context.Context, path, key s
args := make([]string, 0) args := make([]string, 0)
args = append(args, "bundle", "run", key) args = append(args, "bundle", "run", key)
args = append(args, params...) args = append(args, params...)
c := internal.NewCobraTestRunnerWithContext(t, ctx, args...) c := testcli.NewRunnerWithContext(t, ctx, args...)
stdout, _, err := c.Run() stdout, _, err := c.Run()
return stdout.String(), err return stdout.String(), err
} }
func destroyBundle(t testutil.TestingT, ctx context.Context, path string) error { func destroyBundle(t testutil.TestingT, ctx context.Context, path string) error {
ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = env.Set(ctx, "BUNDLE_ROOT", path)
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "destroy", "--auto-approve") c := testcli.NewRunnerWithContext(t, ctx, "bundle", "destroy", "--auto-approve")
_, _, err := c.Run() _, _, err := c.Run()
return err return err
} }

View File

@ -6,6 +6,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/listing" "github.com/databricks/databricks-sdk-go/listing"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
@ -16,7 +17,7 @@ import (
func TestAccClustersList(t *testing.T) { func TestAccClustersList(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := RequireSuccessfulRun(t, "clusters", "list") stdout, stderr := testcli.RequireSuccessfulRun(t, "clusters", "list")
outStr := stdout.String() outStr := stdout.String()
assert.Contains(t, outStr, "ID") assert.Contains(t, outStr, "ID")
assert.Contains(t, outStr, "Name") assert.Contains(t, outStr, "Name")
@ -32,14 +33,14 @@ func TestAccClustersGet(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
clusterId := findValidClusterID(t) clusterId := findValidClusterID(t)
stdout, stderr := RequireSuccessfulRun(t, "clusters", "get", clusterId) stdout, stderr := testcli.RequireSuccessfulRun(t, "clusters", "get", clusterId)
outStr := stdout.String() outStr := stdout.String()
assert.Contains(t, outStr, fmt.Sprintf(`"cluster_id":"%s"`, clusterId)) assert.Contains(t, outStr, fmt.Sprintf(`"cluster_id":"%s"`, clusterId))
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
} }
func TestClusterCreateErrorWhenNoArguments(t *testing.T) { func TestClusterCreateErrorWhenNoArguments(t *testing.T) {
_, _, err := RequireErrorRun(t, "clusters", "create") _, _, err := testcli.RequireErrorRun(t, "clusters", "create")
assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0")
} }

View File

@ -7,6 +7,7 @@ import (
"testing" "testing"
_ "github.com/databricks/cli/cmd/fs" _ "github.com/databricks/cli/cmd/fs"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -21,7 +22,7 @@ func TestAccFsCompletion(t *testing.T) {
f, tmpDir := setupDbfsFiler(t) f, tmpDir := setupDbfsFiler(t)
setupCompletionFile(t, f) setupCompletionFile(t, f)
stdout, _ := RequireSuccessfulRun(t, "__complete", "fs", "ls", tmpDir+"/") stdout, _ := testcli.RequireSuccessfulRun(t, "__complete", "fs", "ls", tmpDir+"/")
expectedOutput := fmt.Sprintf("%s/dir1/\n:2\n", tmpDir) expectedOutput := fmt.Sprintf("%s/dir1/\n:2\n", tmpDir)
assert.Equal(t, expectedOutput, stdout.String()) assert.Equal(t, expectedOutput, stdout.String())
} }

View File

@ -7,6 +7,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@ -27,7 +28,7 @@ func TestAccFsCat(t *testing.T) {
err := f.Write(context.Background(), "hello.txt", strings.NewReader("abcd"), filer.CreateParentDirectories) err := f.Write(context.Background(), "hello.txt", strings.NewReader("abcd"), filer.CreateParentDirectories)
require.NoError(t, err) require.NoError(t, err)
stdout, stderr := RequireSuccessfulRun(t, "fs", "cat", path.Join(tmpDir, "hello.txt")) stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "cat", path.Join(tmpDir, "hello.txt"))
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
assert.Equal(t, "abcd", stdout.String()) assert.Equal(t, "abcd", stdout.String())
}) })
@ -47,7 +48,7 @@ func TestAccFsCatOnADir(t *testing.T) {
err := f.Mkdir(context.Background(), "dir1") err := f.Mkdir(context.Background(), "dir1")
require.NoError(t, err) require.NoError(t, err)
_, _, err = RequireErrorRun(t, "fs", "cat", path.Join(tmpDir, "dir1")) _, _, err = testcli.RequireErrorRun(t, "fs", "cat", path.Join(tmpDir, "dir1"))
assert.ErrorAs(t, err, &filer.NotAFile{}) assert.ErrorAs(t, err, &filer.NotAFile{})
}) })
} }
@ -64,7 +65,7 @@ func TestAccFsCatOnNonExistentFile(t *testing.T) {
_, tmpDir := tc.setupFiler(t) _, tmpDir := tc.setupFiler(t)
_, _, err := RequireErrorRun(t, "fs", "cat", path.Join(tmpDir, "non-existent-file")) _, _, err := testcli.RequireErrorRun(t, "fs", "cat", path.Join(tmpDir, "non-existent-file"))
assert.ErrorIs(t, err, fs.ErrNotExist) assert.ErrorIs(t, err, fs.ErrNotExist)
}) })
} }
@ -73,7 +74,7 @@ func TestAccFsCatOnNonExistentFile(t *testing.T) {
func TestAccFsCatForDbfsInvalidScheme(t *testing.T) { func TestAccFsCatForDbfsInvalidScheme(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file") _, _, err := testcli.RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file")
assert.ErrorContains(t, err, "invalid scheme: dab") assert.ErrorContains(t, err, "invalid scheme: dab")
} }
@ -92,6 +93,6 @@ func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) {
err = f.Write(ctx, "hello.txt", strings.NewReader("abc")) err = f.Write(ctx, "hello.txt", strings.NewReader("abc"))
require.NoError(t, err) require.NoError(t, err)
_, _, err = RequireErrorRun(t, "fs", "cat", "dbfs:"+path.Join(tmpDir, "hello.txt"), "--output=json") _, _, err = testcli.RequireErrorRun(t, "fs", "cat", "dbfs:"+path.Join(tmpDir, "hello.txt"), "--output=json")
assert.ErrorContains(t, err, "json output not supported") assert.ErrorContains(t, err, "json output not supported")
} }

View File

@ -10,6 +10,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -134,7 +135,7 @@ func TestAccFsCpDir(t *testing.T) {
targetFiler, targetDir := tc.setupTarget(t) targetFiler, targetDir := tc.setupTarget(t)
setupSourceDir(t, context.Background(), sourceFiler) setupSourceDir(t, context.Background(), sourceFiler)
RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive") testcli.RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive")
assertTargetDir(t, context.Background(), targetFiler) assertTargetDir(t, context.Background(), targetFiler)
}) })
@ -154,7 +155,7 @@ func TestAccFsCpFileToFile(t *testing.T) {
targetFiler, targetDir := tc.setupTarget(t) targetFiler, targetDir := tc.setupTarget(t)
setupSourceFile(t, context.Background(), sourceFiler) setupSourceFile(t, context.Background(), sourceFiler)
RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "foo.txt"), path.Join(targetDir, "bar.txt")) testcli.RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "foo.txt"), path.Join(targetDir, "bar.txt"))
assertTargetFile(t, context.Background(), targetFiler, "bar.txt") assertTargetFile(t, context.Background(), targetFiler, "bar.txt")
}) })
@ -174,7 +175,7 @@ func TestAccFsCpFileToDir(t *testing.T) {
targetFiler, targetDir := tc.setupTarget(t) targetFiler, targetDir := tc.setupTarget(t)
setupSourceFile(t, context.Background(), sourceFiler) setupSourceFile(t, context.Background(), sourceFiler)
RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "foo.txt"), targetDir) testcli.RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "foo.txt"), targetDir)
assertTargetFile(t, context.Background(), targetFiler, "foo.txt") assertTargetFile(t, context.Background(), targetFiler, "foo.txt")
}) })
@ -193,7 +194,7 @@ func TestAccFsCpFileToDirForWindowsPaths(t *testing.T) {
windowsPath := filepath.Join(filepath.FromSlash(sourceDir), "foo.txt") windowsPath := filepath.Join(filepath.FromSlash(sourceDir), "foo.txt")
RequireSuccessfulRun(t, "fs", "cp", windowsPath, targetDir) testcli.RequireSuccessfulRun(t, "fs", "cp", windowsPath, targetDir)
assertTargetFile(t, ctx, targetFiler, "foo.txt") assertTargetFile(t, ctx, targetFiler, "foo.txt")
} }
@ -214,7 +215,7 @@ func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) {
err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories) err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories)
require.NoError(t, err) require.NoError(t, err)
RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive") testcli.RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive")
assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "this should not be overwritten") assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "this should not be overwritten")
assertFileContent(t, context.Background(), targetFiler, "query.sql", "SELECT 1") assertFileContent(t, context.Background(), targetFiler, "query.sql", "SELECT 1")
assertFileContent(t, context.Background(), targetFiler, "pyNb.py", "# Databricks notebook source\nprint(123)") assertFileContent(t, context.Background(), targetFiler, "pyNb.py", "# Databricks notebook source\nprint(123)")
@ -239,7 +240,7 @@ func TestAccFsCpFileToDirFileNotOverwritten(t *testing.T) {
err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories) err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories)
require.NoError(t, err) require.NoError(t, err)
RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c")) testcli.RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c"))
assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "this should not be overwritten") assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "this should not be overwritten")
}) })
} }
@ -262,7 +263,7 @@ func TestAccFsCpFileToFileFileNotOverwritten(t *testing.T) {
err := targetFiler.Write(context.Background(), "a/b/c/dontoverwrite.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories) err := targetFiler.Write(context.Background(), "a/b/c/dontoverwrite.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories)
require.NoError(t, err) require.NoError(t, err)
RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c/dontoverwrite.txt")) testcli.RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c/dontoverwrite.txt"))
assertFileContent(t, context.Background(), targetFiler, "a/b/c/dontoverwrite.txt", "this should not be overwritten") assertFileContent(t, context.Background(), targetFiler, "a/b/c/dontoverwrite.txt", "this should not be overwritten")
}) })
} }
@ -285,7 +286,7 @@ func TestAccFsCpDirToDirWithOverwriteFlag(t *testing.T) {
err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories) err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories)
require.NoError(t, err) require.NoError(t, err)
RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive", "--overwrite") testcli.RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive", "--overwrite")
assertTargetDir(t, context.Background(), targetFiler) assertTargetDir(t, context.Background(), targetFiler)
}) })
} }
@ -308,7 +309,7 @@ func TestAccFsCpFileToFileWithOverwriteFlag(t *testing.T) {
err := targetFiler.Write(context.Background(), "a/b/c/overwritten.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories) err := targetFiler.Write(context.Background(), "a/b/c/overwritten.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories)
require.NoError(t, err) require.NoError(t, err)
RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c/overwritten.txt"), "--overwrite") testcli.RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c/overwritten.txt"), "--overwrite")
assertFileContent(t, context.Background(), targetFiler, "a/b/c/overwritten.txt", "hello, world\n") assertFileContent(t, context.Background(), targetFiler, "a/b/c/overwritten.txt", "hello, world\n")
}) })
} }
@ -331,7 +332,7 @@ func TestAccFsCpFileToDirWithOverwriteFlag(t *testing.T) {
err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories) err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories)
require.NoError(t, err) require.NoError(t, err)
RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c"), "--overwrite") testcli.RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c"), "--overwrite")
assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "hello, world\n") assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "hello, world\n")
}) })
} }
@ -348,7 +349,7 @@ func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
_, tmpDir := tc.setupFiler(t) _, tmpDir := tc.setupFiler(t)
_, _, err := RequireErrorRun(t, "fs", "cp", path.Join(tmpDir), path.Join(tmpDir, "foobar")) _, _, err := testcli.RequireErrorRun(t, "fs", "cp", path.Join(tmpDir), path.Join(tmpDir, "foobar"))
r := regexp.MustCompile("source path .* is a directory. Please specify the --recursive flag") r := regexp.MustCompile("source path .* is a directory. Please specify the --recursive flag")
assert.Regexp(t, r, err.Error()) assert.Regexp(t, r, err.Error())
}) })
@ -358,7 +359,7 @@ func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
func TestAccFsCpErrorsOnInvalidScheme(t *testing.T) { func TestAccFsCpErrorsOnInvalidScheme(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b") _, _, err := testcli.RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b")
assert.Equal(t, "invalid scheme: https", err.Error()) assert.Equal(t, "invalid scheme: https", err.Error())
} }
@ -379,7 +380,7 @@ func TestAccFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) {
err := targetFiler.Write(context.Background(), "my_target", strings.NewReader("I'll block any attempts to recursively copy"), filer.CreateParentDirectories) err := targetFiler.Write(context.Background(), "my_target", strings.NewReader("I'll block any attempts to recursively copy"), filer.CreateParentDirectories)
require.NoError(t, err) require.NoError(t, err)
_, _, err = RequireErrorRun(t, "fs", "cp", sourceDir, path.Join(targetDir, "my_target"), "--recursive") _, _, err = testcli.RequireErrorRun(t, "fs", "cp", sourceDir, path.Join(targetDir, "my_target"), "--recursive")
assert.Error(t, err) assert.Error(t, err)
}) })
} }

View File

@ -10,6 +10,7 @@ import (
"testing" "testing"
_ "github.com/databricks/cli/cmd/fs" _ "github.com/databricks/cli/cmd/fs"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -51,7 +52,7 @@ func TestAccFsLs(t *testing.T) {
f, tmpDir := tc.setupFiler(t) f, tmpDir := tc.setupFiler(t)
setupLsFiles(t, f) setupLsFiles(t, f)
stdout, stderr := RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json") stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json")
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
var parsedStdout []map[string]any var parsedStdout []map[string]any
@ -84,7 +85,7 @@ func TestAccFsLsWithAbsolutePaths(t *testing.T) {
f, tmpDir := tc.setupFiler(t) f, tmpDir := tc.setupFiler(t)
setupLsFiles(t, f) setupLsFiles(t, f)
stdout, stderr := RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json", "--absolute") stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json", "--absolute")
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
var parsedStdout []map[string]any var parsedStdout []map[string]any
@ -116,7 +117,7 @@ func TestAccFsLsOnFile(t *testing.T) {
f, tmpDir := tc.setupFiler(t) f, tmpDir := tc.setupFiler(t)
setupLsFiles(t, f) setupLsFiles(t, f)
_, _, err := RequireErrorRun(t, "fs", "ls", path.Join(tmpDir, "a", "hello.txt"), "--output=json") _, _, err := testcli.RequireErrorRun(t, "fs", "ls", path.Join(tmpDir, "a", "hello.txt"), "--output=json")
assert.Regexp(t, regexp.MustCompile("not a directory: .*/a/hello.txt"), err.Error()) assert.Regexp(t, regexp.MustCompile("not a directory: .*/a/hello.txt"), err.Error())
assert.ErrorAs(t, err, &filer.NotADirectory{}) assert.ErrorAs(t, err, &filer.NotADirectory{})
}) })
@ -134,7 +135,7 @@ func TestAccFsLsOnEmptyDir(t *testing.T) {
_, tmpDir := tc.setupFiler(t) _, tmpDir := tc.setupFiler(t)
stdout, stderr := RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json") stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json")
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
var parsedStdout []map[string]any var parsedStdout []map[string]any
err := json.Unmarshal(stdout.Bytes(), &parsedStdout) err := json.Unmarshal(stdout.Bytes(), &parsedStdout)
@ -157,7 +158,7 @@ func TestAccFsLsForNonexistingDir(t *testing.T) {
_, tmpDir := tc.setupFiler(t) _, tmpDir := tc.setupFiler(t)
_, _, err := RequireErrorRun(t, "fs", "ls", path.Join(tmpDir, "nonexistent"), "--output=json") _, _, err := testcli.RequireErrorRun(t, "fs", "ls", path.Join(tmpDir, "nonexistent"), "--output=json")
assert.ErrorIs(t, err, fs.ErrNotExist) assert.ErrorIs(t, err, fs.ErrNotExist)
assert.Regexp(t, regexp.MustCompile("no such directory: .*/nonexistent"), err.Error()) assert.Regexp(t, regexp.MustCompile("no such directory: .*/nonexistent"), err.Error())
}) })
@ -169,6 +170,6 @@ func TestAccFsLsWithoutScheme(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "fs", "ls", "/path-without-a-dbfs-scheme", "--output=json") _, _, err := testcli.RequireErrorRun(t, "fs", "ls", "/path-without-a-dbfs-scheme", "--output=json")
assert.ErrorIs(t, err, fs.ErrNotExist) assert.ErrorIs(t, err, fs.ErrNotExist)
} }

View File

@ -7,6 +7,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -24,7 +25,7 @@ func TestAccFsMkdir(t *testing.T) {
f, tmpDir := tc.setupFiler(t) f, tmpDir := tc.setupFiler(t)
// create directory "a" // create directory "a"
stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a")) stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a"))
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())
@ -49,7 +50,7 @@ func TestAccFsMkdirCreatesIntermediateDirectories(t *testing.T) {
f, tmpDir := tc.setupFiler(t) f, tmpDir := tc.setupFiler(t)
// create directory "a/b/c" // create directory "a/b/c"
stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a", "b", "c")) stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a", "b", "c"))
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())
@ -90,7 +91,7 @@ func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
// assert run is successful without any errors // assert run is successful without any errors
stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a")) stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a"))
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())
}) })
@ -110,7 +111,7 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
// assert mkdir fails // assert mkdir fails
_, _, err = RequireErrorRun(t, "fs", "mkdir", path.Join(tmpDir, "hello")) _, _, err = testcli.RequireErrorRun(t, "fs", "mkdir", path.Join(tmpDir, "hello"))
// Different cloud providers or cloud configurations return different errors. // Different cloud providers or cloud configurations return different errors.
regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$|(^|: )"The specified path already exists.".*$`) regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$|(^|: )"The specified path already exists.".*$`)
@ -127,7 +128,7 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
// assert mkdir fails // assert mkdir fails
_, _, err = RequireErrorRun(t, "fs", "mkdir", path.Join(tmpDir, "hello")) _, _, err = testcli.RequireErrorRun(t, "fs", "mkdir", path.Join(tmpDir, "hello"))
assert.ErrorAs(t, err, &filer.FileAlreadyExistsError{}) assert.ErrorAs(t, err, &filer.FileAlreadyExistsError{})
}) })

View File

@ -7,6 +7,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -31,7 +32,7 @@ func TestAccFsRmFile(t *testing.T) {
assert.NoError(t, err) assert.NoError(t, err)
// Run rm command // Run rm command
stdout, stderr := RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "hello.txt")) stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "hello.txt"))
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())
@ -61,7 +62,7 @@ func TestAccFsRmEmptyDir(t *testing.T) {
assert.NoError(t, err) assert.NoError(t, err)
// Run rm command // Run rm command
stdout, stderr := RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "a")) stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "a"))
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())
@ -95,7 +96,7 @@ func TestAccFsRmNonEmptyDirectory(t *testing.T) {
assert.NoError(t, err) assert.NoError(t, err)
// Run rm command // Run rm command
_, _, err = RequireErrorRun(t, "fs", "rm", path.Join(tmpDir, "a")) _, _, err = testcli.RequireErrorRun(t, "fs", "rm", path.Join(tmpDir, "a"))
assert.ErrorIs(t, err, fs.ErrInvalid) assert.ErrorIs(t, err, fs.ErrInvalid)
assert.ErrorAs(t, err, &filer.DirectoryNotEmptyError{}) assert.ErrorAs(t, err, &filer.DirectoryNotEmptyError{})
}) })
@ -114,7 +115,7 @@ func TestAccFsRmForNonExistentFile(t *testing.T) {
_, tmpDir := tc.setupFiler(t) _, tmpDir := tc.setupFiler(t)
// Expect error if file does not exist // Expect error if file does not exist
_, _, err := RequireErrorRun(t, "fs", "rm", path.Join(tmpDir, "does-not-exist")) _, _, err := testcli.RequireErrorRun(t, "fs", "rm", path.Join(tmpDir, "does-not-exist"))
assert.ErrorIs(t, err, fs.ErrNotExist) assert.ErrorIs(t, err, fs.ErrNotExist)
}) })
} }
@ -144,7 +145,7 @@ func TestAccFsRmDirRecursively(t *testing.T) {
assert.NoError(t, err) assert.NoError(t, err)
// Run rm command // Run rm command
stdout, stderr := RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "a"), "--recursive") stdout, stderr := testcli.RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "a"), "--recursive")
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())

View File

@ -8,6 +8,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/dbr" "github.com/databricks/cli/libs/dbr"
"github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/git"
@ -44,9 +45,9 @@ func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
targetPath := testutil.RandomName(path.Join("/Workspace/Users", me.UserName, "/testing-clone-bundle-examples-")) targetPath := testutil.RandomName(path.Join("/Workspace/Users", me.UserName, "/testing-clone-bundle-examples-"))
stdout, stderr := RequireSuccessfulRun(t, "repos", "create", examplesRepoUrl, examplesRepoProvider, "--path", targetPath) stdout, stderr := testcli.RequireSuccessfulRun(t, "repos", "create", examplesRepoUrl, examplesRepoProvider, "--path", targetPath)
t.Cleanup(func() { t.Cleanup(func() {
RequireSuccessfulRun(t, "repos", "delete", targetPath) testcli.RequireSuccessfulRun(t, "repos", "delete", targetPath)
}) })
assert.Empty(t, stderr.String()) assert.Empty(t, stderr.String())
@ -71,9 +72,9 @@ func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
rootPath := testutil.RandomName(path.Join("/Workspace/Users", me.UserName, "testing-nonrepo-")) rootPath := testutil.RandomName(path.Join("/Workspace/Users", me.UserName, "testing-nonrepo-"))
_, stderr := RequireSuccessfulRun(t, "workspace", "mkdirs", path.Join(rootPath, "a/b/c")) _, stderr := testcli.RequireSuccessfulRun(t, "workspace", "mkdirs", path.Join(rootPath, "a/b/c"))
t.Cleanup(func() { t.Cleanup(func() {
RequireSuccessfulRun(t, "workspace", "delete", "--recursive", rootPath) testcli.RequireSuccessfulRun(t, "workspace", "delete", "--recursive", rootPath)
}) })
assert.Empty(t, stderr.String()) assert.Empty(t, stderr.String())

View File

@ -1,30 +1,18 @@
package internal package internal
import ( import (
"bufio"
"bytes"
"context" "context"
"encoding/json"
"errors" "errors"
"fmt" "fmt"
"io"
"net/http" "net/http"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
"reflect"
"strings" "strings"
"sync"
"time"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/cmd"
_ "github.com/databricks/cli/cmd/version"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/apierr"
@ -33,305 +21,9 @@ import (
"github.com/databricks/databricks-sdk-go/service/files" "github.com/databricks/databricks-sdk-go/service/files"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
_ "github.com/databricks/cli/cmd/workspace"
) )
// Helper for running the root command in the background.
// It ensures that the background goroutine terminates upon
// test completion through cancelling the command context.
type cobraTestRunner struct {
testutil.TestingT
args []string
stdout bytes.Buffer
stderr bytes.Buffer
stdinR *io.PipeReader
stdinW *io.PipeWriter
ctx context.Context
// Line-by-line output.
// Background goroutines populate these channels by reading from stdout/stderr pipes.
stdoutLines <-chan string
stderrLines <-chan string
errch <-chan error
}
func consumeLines(ctx context.Context, wg *sync.WaitGroup, r io.Reader) <-chan string {
ch := make(chan string, 30000)
wg.Add(1)
go func() {
defer close(ch)
defer wg.Done()
scanner := bufio.NewScanner(r)
for scanner.Scan() {
// We expect to be able to always send these lines into the channel.
// If we can't, it means the channel is full and likely there is a problem
// in either the test or the code under test.
select {
case <-ctx.Done():
return
case ch <- scanner.Text():
continue
default:
panic("line buffer is full")
}
}
}()
return ch
}
func (t *cobraTestRunner) registerFlagCleanup(c *cobra.Command) {
// Find target command that will be run. Example: if the command run is `databricks fs cp`,
// target command corresponds to `cp`
targetCmd, _, err := c.Find(t.args)
if err != nil && strings.HasPrefix(err.Error(), "unknown command") {
// even if command is unknown, we can proceed
require.NotNil(t, targetCmd)
} else {
require.NoError(t, err)
}
// Force initialization of default flags.
// These are initialized by cobra at execution time and would otherwise
// not be cleaned up by the cleanup function below.
targetCmd.InitDefaultHelpFlag()
targetCmd.InitDefaultVersionFlag()
// Restore flag values to their original value on test completion.
targetCmd.Flags().VisitAll(func(f *pflag.Flag) {
v := reflect.ValueOf(f.Value)
if v.Kind() == reflect.Ptr {
v = v.Elem()
}
// Store copy of the current flag value.
reset := reflect.New(v.Type()).Elem()
reset.Set(v)
t.Cleanup(func() {
v.Set(reset)
})
})
}
// Like [cobraTestRunner.Eventually], but more specific
func (t *cobraTestRunner) WaitForTextPrinted(text string, timeout time.Duration) {
t.Eventually(func() bool {
currentStdout := t.stdout.String()
return strings.Contains(currentStdout, text)
}, timeout, 50*time.Millisecond)
}
func (t *cobraTestRunner) WaitForOutput(text string, timeout time.Duration) {
require.Eventually(t, func() bool {
currentStdout := t.stdout.String()
currentErrout := t.stderr.String()
return strings.Contains(currentStdout, text) || strings.Contains(currentErrout, text)
}, timeout, 50*time.Millisecond)
}
func (t *cobraTestRunner) WithStdin() {
reader, writer := io.Pipe()
t.stdinR = reader
t.stdinW = writer
}
func (t *cobraTestRunner) CloseStdin() {
if t.stdinW == nil {
panic("no standard input configured")
}
t.stdinW.Close()
}
func (t *cobraTestRunner) SendText(text string) {
if t.stdinW == nil {
panic("no standard input configured")
}
_, err := t.stdinW.Write([]byte(text + "\n"))
if err != nil {
panic("Failed to to write to t.stdinW")
}
}
func (t *cobraTestRunner) RunBackground() {
var stdoutR, stderrR io.Reader
var stdoutW, stderrW io.WriteCloser
stdoutR, stdoutW = io.Pipe()
stderrR, stderrW = io.Pipe()
ctx := cmdio.NewContext(t.ctx, &cmdio.Logger{
Mode: flags.ModeAppend,
Reader: bufio.Reader{},
Writer: stderrW,
})
cli := cmd.New(ctx)
cli.SetOut(stdoutW)
cli.SetErr(stderrW)
cli.SetArgs(t.args)
if t.stdinW != nil {
cli.SetIn(t.stdinR)
}
// Register cleanup function to restore flags to their original values
// once test has been executed. This is needed because flag values reside
// in a global singleton data-structure, and thus subsequent tests might
// otherwise interfere with each other
t.registerFlagCleanup(cli)
errch := make(chan error)
ctx, cancel := context.WithCancel(ctx)
// Tee stdout/stderr to buffers.
stdoutR = io.TeeReader(stdoutR, &t.stdout)
stderrR = io.TeeReader(stderrR, &t.stderr)
// Consume stdout/stderr line-by-line.
var wg sync.WaitGroup
t.stdoutLines = consumeLines(ctx, &wg, stdoutR)
t.stderrLines = consumeLines(ctx, &wg, stderrR)
// Run command in background.
go func() {
err := root.Execute(ctx, cli)
if err != nil {
t.Logf("Error running command: %s", err)
}
// Close pipes to signal EOF.
stdoutW.Close()
stderrW.Close()
// Wait for the [consumeLines] routines to finish now that
// the pipes they're reading from have closed.
wg.Wait()
if t.stdout.Len() > 0 {
// Make a copy of the buffer such that it remains "unread".
scanner := bufio.NewScanner(bytes.NewBuffer(t.stdout.Bytes()))
for scanner.Scan() {
t.Logf("[databricks stdout]: %s", scanner.Text())
}
}
if t.stderr.Len() > 0 {
// Make a copy of the buffer such that it remains "unread".
scanner := bufio.NewScanner(bytes.NewBuffer(t.stderr.Bytes()))
for scanner.Scan() {
t.Logf("[databricks stderr]: %s", scanner.Text())
}
}
// Reset context on command for the next test.
// These commands are globals so we have to clean up to the best of our ability after each run.
// See https://github.com/spf13/cobra/blob/a6f198b635c4b18fff81930c40d464904e55b161/command.go#L1062-L1066
//nolint:staticcheck // cobra sets the context and doesn't clear it
cli.SetContext(nil)
// Make caller aware of error.
errch <- err
close(errch)
}()
// Ensure command terminates upon test completion (success or failure).
t.Cleanup(func() {
// Signal termination of command.
cancel()
// Wait for goroutine to finish.
<-errch
})
t.errch = errch
}
func (t *cobraTestRunner) Run() (bytes.Buffer, bytes.Buffer, error) {
t.RunBackground()
err := <-t.errch
return t.stdout, t.stderr, err
}
// Like [require.Eventually] but errors if the underlying command has failed.
func (c *cobraTestRunner) Eventually(condition func() bool, waitFor, tick time.Duration, msgAndArgs ...any) {
ch := make(chan bool, 1)
timer := time.NewTimer(waitFor)
defer timer.Stop()
ticker := time.NewTicker(tick)
defer ticker.Stop()
// Kick off condition check immediately.
go func() { ch <- condition() }()
for tick := ticker.C; ; {
select {
case err := <-c.errch:
require.Fail(c, "Command failed", err)
return
case <-timer.C:
require.Fail(c, "Condition never satisfied", msgAndArgs...)
return
case <-tick:
tick = nil
go func() { ch <- condition() }()
case v := <-ch:
if v {
return
}
tick = ticker.C
}
}
}
func (t *cobraTestRunner) RunAndExpectOutput(heredoc string) {
stdout, _, err := t.Run()
require.NoError(t, err)
require.Equal(t, cmdio.Heredoc(heredoc), strings.TrimSpace(stdout.String()))
}
// RunAndParseJSON runs the command, requires that it succeeds, and
// unmarshals its standard output as JSON into v.
func (t *cobraTestRunner) RunAndParseJSON(v any) {
	stdout, _, err := t.Run()
	require.NoError(t, err)
	require.NoError(t, json.Unmarshal(stdout.Bytes(), &v))
}
// NewCobraTestRunner returns a runner for the given arguments that uses a
// background context.
func NewCobraTestRunner(t testutil.TestingT, args ...string) *cobraTestRunner {
	return NewCobraTestRunnerWithContext(t, context.Background(), args...)
}
// NewCobraTestRunnerWithContext returns a runner for the given arguments
// that uses the provided context when executing the command.
func NewCobraTestRunnerWithContext(t testutil.TestingT, ctx context.Context, args ...string) *cobraTestRunner {
	runner := cobraTestRunner{
		TestingT: t,
		ctx:      ctx,
		args:     args,
	}
	return &runner
}
// RequireSuccessfulRun runs the CLI with the given arguments and requires
// that it succeeds. It returns the captured stdout and stderr buffers.
func RequireSuccessfulRun(t testutil.TestingT, args ...string) (bytes.Buffer, bytes.Buffer) {
	t.Logf("run args: [%s]", strings.Join(args, ", "))
	runner := NewCobraTestRunner(t, args...)
	stdout, stderr, err := runner.Run()
	require.NoError(t, err)
	return stdout, stderr
}
// RequireErrorRun runs the CLI with the given arguments and requires that it
// fails. It returns the captured stdout and stderr buffers and the error for
// further assertions by the caller.
func RequireErrorRun(t testutil.TestingT, args ...string) (bytes.Buffer, bytes.Buffer, error) {
	// Log arguments for parity with [RequireSuccessfulRun] to aid debugging.
	t.Logf("run args: [%s]", strings.Join(args, ", "))
	c := NewCobraTestRunner(t, args...)
	stdout, stderr, err := c.Run()
	require.Error(t, err)
	return stdout, stderr, err
}
func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask { func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
tasks := make([]jobs.SubmitTask, 0) tasks := make([]jobs.SubmitTask, 0)
for i := 0; i < len(versions); i++ { for i := 0; i < len(versions); i++ {

View File

@ -11,6 +11,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/iamutil" "github.com/databricks/cli/libs/iamutil"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@ -22,7 +23,7 @@ func TestAccBundleInitErrorOnUnknownFields(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir() tmpDir := t.TempDir()
_, _, err := RequireErrorRun(t, "bundle", "init", "./testdata/init/field-does-not-exist", "--output-dir", tmpDir) _, _, err := testcli.RequireErrorRun(t, "bundle", "init", "./testdata/init/field-does-not-exist", "--output-dir", tmpDir)
assert.EqualError(t, err, "failed to compute file content for bar.tmpl. variable \"does_not_exist\" not defined") assert.EqualError(t, err, "failed to compute file content for bar.tmpl. variable \"does_not_exist\" not defined")
} }
@ -63,7 +64,7 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) {
// Run bundle init // Run bundle init
assert.NoFileExists(t, filepath.Join(tmpDir2, "repo_name", projectName, "README.md")) assert.NoFileExists(t, filepath.Join(tmpDir2, "repo_name", projectName, "README.md"))
RequireSuccessfulRun(t, "bundle", "init", "mlops-stacks", "--output-dir", tmpDir2, "--config-file", filepath.Join(tmpDir1, "config.json")) testcli.RequireSuccessfulRun(t, "bundle", "init", "mlops-stacks", "--output-dir", tmpDir2, "--config-file", filepath.Join(tmpDir1, "config.json"))
// Assert that the README.md file was created // Assert that the README.md file was created
assert.FileExists(t, filepath.Join(tmpDir2, "repo_name", projectName, "README.md")) assert.FileExists(t, filepath.Join(tmpDir2, "repo_name", projectName, "README.md"))
@ -71,17 +72,17 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) {
// Validate the stack // Validate the stack
testutil.Chdir(t, filepath.Join(tmpDir2, "repo_name", projectName)) testutil.Chdir(t, filepath.Join(tmpDir2, "repo_name", projectName))
RequireSuccessfulRun(t, "bundle", "validate") testcli.RequireSuccessfulRun(t, "bundle", "validate")
// Deploy the stack // Deploy the stack
RequireSuccessfulRun(t, "bundle", "deploy") testcli.RequireSuccessfulRun(t, "bundle", "deploy")
t.Cleanup(func() { t.Cleanup(func() {
// Delete the stack // Delete the stack
RequireSuccessfulRun(t, "bundle", "destroy", "--auto-approve") testcli.RequireSuccessfulRun(t, "bundle", "destroy", "--auto-approve")
}) })
// Get summary of the bundle deployment // Get summary of the bundle deployment
stdout, _ := RequireSuccessfulRun(t, "bundle", "summary", "--output", "json") stdout, _ := testcli.RequireSuccessfulRun(t, "bundle", "summary", "--output", "json")
summary := &config.Root{} summary := &config.Root{}
err = json.Unmarshal(stdout.Bytes(), summary) err = json.Unmarshal(stdout.Bytes(), summary)
require.NoError(t, err) require.NoError(t, err)
@ -159,7 +160,7 @@ func TestAccBundleInitHelpers(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
// Run bundle init. // Run bundle init.
RequireSuccessfulRun(t, "bundle", "init", tmpDir, "--output-dir", tmpDir2) testcli.RequireSuccessfulRun(t, "bundle", "init", tmpDir, "--output-dir", tmpDir2)
// Assert that the helper function was correctly computed. // Assert that the helper function was correctly computed.
assertLocalFileContents(t, filepath.Join(tmpDir2, "foo.txt"), test.expected) assertLocalFileContents(t, filepath.Join(tmpDir2, "foo.txt"), test.expected)

View File

@ -6,6 +6,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -17,10 +18,10 @@ func TestAccCreateJob(t *testing.T) {
if env != "azure" { if env != "azure" {
t.Skipf("Not running test on cloud %s", env) t.Skipf("Not running test on cloud %s", env)
} }
stdout, stderr := RequireSuccessfulRun(t, "jobs", "create", "--json", "@testjsons/create_job_without_workers.json", "--log-level=debug") stdout, stderr := testcli.RequireSuccessfulRun(t, "jobs", "create", "--json", "@testjsons/create_job_without_workers.json", "--log-level=debug")
assert.Empty(t, stderr.String()) assert.Empty(t, stderr.String())
var output map[string]int var output map[string]int
err := json.Unmarshal(stdout.Bytes(), &output) err := json.Unmarshal(stdout.Bytes(), &output)
require.NoError(t, err) require.NoError(t, err)
RequireSuccessfulRun(t, "jobs", "delete", fmt.Sprint(output["job_id"]), "--log-level=debug") testcli.RequireSuccessfulRun(t, "jobs", "delete", fmt.Sprint(output["job_id"]), "--log-level=debug")
} }

View File

@ -6,6 +6,7 @@ import (
"strconv" "strconv"
"testing" "testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/apierr"
@ -52,7 +53,7 @@ func TestAccReposCreateWithProvider(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
repoPath := synthesizeTemporaryRepoPath(t, w, ctx) repoPath := synthesizeTemporaryRepoPath(t, w, ctx)
_, stderr := RequireSuccessfulRun(t, "repos", "create", repoUrl, "gitHub", "--path", repoPath) _, stderr := testcli.RequireSuccessfulRun(t, "repos", "create", repoUrl, "gitHub", "--path", repoPath)
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
// Confirm the repo was created. // Confirm the repo was created.
@ -69,7 +70,7 @@ func TestAccReposCreateWithoutProvider(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
repoPath := synthesizeTemporaryRepoPath(t, w, ctx) repoPath := synthesizeTemporaryRepoPath(t, w, ctx)
_, stderr := RequireSuccessfulRun(t, "repos", "create", repoUrl, "--path", repoPath) _, stderr := testcli.RequireSuccessfulRun(t, "repos", "create", repoUrl, "--path", repoPath)
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
// Confirm the repo was created. // Confirm the repo was created.
@ -88,22 +89,22 @@ func TestAccReposGet(t *testing.T) {
repoId, repoPath := createTemporaryRepo(t, w, ctx) repoId, repoPath := createTemporaryRepo(t, w, ctx)
// Get by ID // Get by ID
byIdOutput, stderr := RequireSuccessfulRun(t, "repos", "get", strconv.FormatInt(repoId, 10), "--output=json") byIdOutput, stderr := testcli.RequireSuccessfulRun(t, "repos", "get", strconv.FormatInt(repoId, 10), "--output=json")
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
// Get by path // Get by path
byPathOutput, stderr := RequireSuccessfulRun(t, "repos", "get", repoPath, "--output=json") byPathOutput, stderr := testcli.RequireSuccessfulRun(t, "repos", "get", repoPath, "--output=json")
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
// Output should be the same // Output should be the same
assert.Equal(t, byIdOutput.String(), byPathOutput.String()) assert.Equal(t, byIdOutput.String(), byPathOutput.String())
// Get by path fails // Get by path fails
_, stderr, err = RequireErrorRun(t, "repos", "get", repoPath+"-doesntexist", "--output=json") _, stderr, err = testcli.RequireErrorRun(t, "repos", "get", repoPath+"-doesntexist", "--output=json")
assert.ErrorContains(t, err, "failed to look up repo") assert.ErrorContains(t, err, "failed to look up repo")
// Get by path resolves to something other than a repo // Get by path resolves to something other than a repo
_, stderr, err = RequireErrorRun(t, "repos", "get", "/Repos", "--output=json") _, stderr, err = testcli.RequireErrorRun(t, "repos", "get", "/Repos", "--output=json")
assert.ErrorContains(t, err, "is not a repo") assert.ErrorContains(t, err, "is not a repo")
} }
@ -117,11 +118,11 @@ func TestAccReposUpdate(t *testing.T) {
repoId, repoPath := createTemporaryRepo(t, w, ctx) repoId, repoPath := createTemporaryRepo(t, w, ctx)
// Update by ID // Update by ID
byIdOutput, stderr := RequireSuccessfulRun(t, "repos", "update", strconv.FormatInt(repoId, 10), "--branch", "ide") byIdOutput, stderr := testcli.RequireSuccessfulRun(t, "repos", "update", strconv.FormatInt(repoId, 10), "--branch", "ide")
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
// Update by path // Update by path
byPathOutput, stderr := RequireSuccessfulRun(t, "repos", "update", repoPath, "--branch", "ide") byPathOutput, stderr := testcli.RequireSuccessfulRun(t, "repos", "update", repoPath, "--branch", "ide")
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
// Output should be the same // Output should be the same
@ -138,7 +139,7 @@ func TestAccReposDeleteByID(t *testing.T) {
repoId, _ := createTemporaryRepo(t, w, ctx) repoId, _ := createTemporaryRepo(t, w, ctx)
// Delete by ID // Delete by ID
stdout, stderr := RequireSuccessfulRun(t, "repos", "delete", strconv.FormatInt(repoId, 10)) stdout, stderr := testcli.RequireSuccessfulRun(t, "repos", "delete", strconv.FormatInt(repoId, 10))
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
@ -157,7 +158,7 @@ func TestAccReposDeleteByPath(t *testing.T) {
repoId, repoPath := createTemporaryRepo(t, w, ctx) repoId, repoPath := createTemporaryRepo(t, w, ctx)
// Delete by path // Delete by path
stdout, stderr := RequireSuccessfulRun(t, "repos", "delete", repoPath) stdout, stderr := testcli.RequireSuccessfulRun(t, "repos", "delete", repoPath)
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())

View File

@ -7,6 +7,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -14,7 +15,7 @@ import (
) )
func TestSecretsCreateScopeErrWhenNoArguments(t *testing.T) { func TestSecretsCreateScopeErrWhenNoArguments(t *testing.T) {
_, _, err := RequireErrorRun(t, "secrets", "create-scope") _, _, err := testcli.RequireErrorRun(t, "secrets", "create-scope")
assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0")
} }
@ -68,7 +69,7 @@ func TestAccSecretsPutSecretStringValue(tt *testing.T) {
key := "test-key" key := "test-key"
value := "test-value\nwith-newlines\n" value := "test-value\nwith-newlines\n"
stdout, stderr := RequireSuccessfulRun(t, "secrets", "put-secret", scope, key, "--string-value", value) stdout, stderr := testcli.RequireSuccessfulRun(t, "secrets", "put-secret", scope, key, "--string-value", value)
assert.Empty(t, stdout) assert.Empty(t, stdout)
assert.Empty(t, stderr) assert.Empty(t, stderr)
@ -82,7 +83,7 @@ func TestAccSecretsPutSecretBytesValue(tt *testing.T) {
key := "test-key" key := "test-key"
value := []byte{0x00, 0x01, 0x02, 0x03} value := []byte{0x00, 0x01, 0x02, 0x03}
stdout, stderr := RequireSuccessfulRun(t, "secrets", "put-secret", scope, key, "--bytes-value", string(value)) stdout, stderr := testcli.RequireSuccessfulRun(t, "secrets", "put-secret", scope, key, "--bytes-value", string(value))
assert.Empty(t, stdout) assert.Empty(t, stdout)
assert.Empty(t, stderr) assert.Empty(t, stderr)

View File

@ -4,6 +4,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -14,7 +15,7 @@ func TestAccStorageCredentialsListRendersResponse(t *testing.T) {
// Check if metastore is assigned for the workspace, otherwise test will fail // Check if metastore is assigned for the workspace, otherwise test will fail
t.Log(testutil.GetEnvOrSkipTest(t, "TEST_METASTORE_ID")) t.Log(testutil.GetEnvOrSkipTest(t, "TEST_METASTORE_ID"))
stdout, stderr := RequireSuccessfulRun(t, "storage-credentials", "list") stdout, stderr := testcli.RequireSuccessfulRun(t, "storage-credentials", "list")
assert.NotEmpty(t, stdout) assert.NotEmpty(t, stdout)
assert.Empty(t, stderr) assert.Empty(t, stderr)
} }

View File

@ -15,7 +15,7 @@ import (
"testing" "testing"
"time" "time"
_ "github.com/databricks/cli/cmd/sync" "github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/sync" "github.com/databricks/cli/libs/sync"
@ -64,7 +64,7 @@ func setupRepo(t *testing.T, wsc *databricks.WorkspaceClient, ctx context.Contex
type syncTest struct { type syncTest struct {
t *testing.T t *testing.T
c *cobraTestRunner c *testcli.Runner
w *databricks.WorkspaceClient w *databricks.WorkspaceClient
f filer.Filer f filer.Filer
localRoot string localRoot string
@ -89,7 +89,7 @@ func setupSyncTest(t *testing.T, args ...string) *syncTest {
"json", "json",
}, args...) }, args...)
c := NewCobraTestRunner(t, args...) c := testcli.NewRunner(t, args...)
c.RunBackground() c.RunBackground()
return &syncTest{ return &syncTest{
@ -110,7 +110,7 @@ func (s *syncTest) waitForCompletionMarker() {
select { select {
case <-ctx.Done(): case <-ctx.Done():
s.t.Fatal("timed out waiting for sync to complete") s.t.Fatal("timed out waiting for sync to complete")
case line := <-s.c.stdoutLines: case line := <-s.c.StdoutLines:
var event sync.EventBase var event sync.EventBase
err := json.Unmarshal([]byte(line), &event) err := json.Unmarshal([]byte(line), &event)
require.NoError(s.t, err) require.NoError(s.t, err)

View File

@ -0,0 +1,7 @@
# testcli
This package provides a way to run the CLI from tests as if it were a separate process.
By running the CLI inline we can still set breakpoints and step through execution.
It transitively imports nearly the entire repository, which is why we
intentionally keep this package _separate_ from `testutil`.

315
internal/testcli/runner.go Normal file
View File

@ -0,0 +1,315 @@
package testcli
import (
"bufio"
"bytes"
"context"
"encoding/json"
"io"
"reflect"
"strings"
"sync"
"time"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/stretchr/testify/require"
"github.com/databricks/cli/cmd"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
)
// Helper for running the root command in the background.
// It ensures that the background goroutine terminates upon
// test completion through cancelling the command context.
type Runner struct {
	testutil.TestingT

	// args are the CLI arguments passed to the command under test.
	args []string
	// stdout and stderr accumulate the full output of the command.
	stdout bytes.Buffer
	stderr bytes.Buffer
	// stdinR/stdinW form an optional stdin pipe; see [Runner.WithStdin].
	stdinR *io.PipeReader
	stdinW *io.PipeWriter

	// ctx is the base context the command is executed with.
	ctx context.Context

	// Line-by-line output.
	// Background goroutines populate these channels by reading from stdout/stderr pipes.
	StdoutLines <-chan string
	StderrLines <-chan string

	// errch yields the command's final error once it terminates.
	errch <-chan error
}
// consumeLines reads r line-by-line and forwards each line on the returned
// channel. It spawns a goroutine (tracked via wg) that exits when the reader
// is exhausted or ctx is cancelled; the channel is closed on exit.
func consumeLines(ctx context.Context, wg *sync.WaitGroup, r io.Reader) <-chan string {
	// Large buffer so tests that ignore the channel don't stall the command.
	ch := make(chan string, 30000)
	wg.Add(1)
	go func() {
		defer close(ch)
		defer wg.Done()
		scanner := bufio.NewScanner(r)
		for scanner.Scan() {
			// We expect to be able to always send these lines into the channel.
			// If we can't, it means the channel is full and likely there is a problem
			// in either the test or the code under test.
			select {
			case <-ctx.Done():
				return
			case ch <- scanner.Text():
				continue
			default:
				panic("line buffer is full")
			}
		}
	}()
	return ch
}
// registerFlagCleanup registers a test cleanup that restores all flag values
// of the command about to run back to their current values. Flags live in
// package-level singletons, so without this, one test's flag values would
// leak into subsequent tests.
func (r *Runner) registerFlagCleanup(c *cobra.Command) {
	// Find target command that will be run. Example: if the command run is `databricks fs cp`,
	// target command corresponds to `cp`
	targetCmd, _, err := c.Find(r.args)
	if err != nil && strings.HasPrefix(err.Error(), "unknown command") {
		// even if command is unknown, we can proceed
		require.NotNil(r, targetCmd)
	} else {
		require.NoError(r, err)
	}

	// Force initialization of default flags.
	// These are initialized by cobra at execution time and would otherwise
	// not be cleaned up by the cleanup function below.
	targetCmd.InitDefaultHelpFlag()
	targetCmd.InitDefaultVersionFlag()

	// Restore flag values to their original value on test completion.
	targetCmd.Flags().VisitAll(func(f *pflag.Flag) {
		// Unwrap the pointer so we can snapshot and later restore the value.
		v := reflect.ValueOf(f.Value)
		if v.Kind() == reflect.Ptr {
			v = v.Elem()
		}
		// Store copy of the current flag value.
		reset := reflect.New(v.Type()).Elem()
		reset.Set(v)
		r.Cleanup(func() {
			v.Set(reset)
		})
	})
}
// WaitForTextPrinted polls until the given text appears on the command's
// standard output, failing the test after timeout.
// Like [Runner.Eventually], but more specific.
func (r *Runner) WaitForTextPrinted(text string, timeout time.Duration) {
	seen := func() bool {
		return strings.Contains(r.stdout.String(), text)
	}
	r.Eventually(seen, timeout, 50*time.Millisecond)
}
// WaitForOutput polls until the given text appears on either the command's
// standard output or standard error, failing the test after timeout.
func (r *Runner) WaitForOutput(text string, timeout time.Duration) {
	require.Eventually(r, func() bool {
		if strings.Contains(r.stdout.String(), text) {
			return true
		}
		return strings.Contains(r.stderr.String(), text)
	}, timeout, 50*time.Millisecond)
}
// WithStdin configures a pipe as the command's standard input.
// Write to it with [Runner.SendText] and close it with [Runner.CloseStdin].
func (r *Runner) WithStdin() {
	r.stdinR, r.stdinW = io.Pipe()
}
// CloseStdin closes the write end of the stdin pipe, signaling EOF to the
// command. Panics if stdin was not configured via [Runner.WithStdin].
func (r *Runner) CloseStdin() {
	w := r.stdinW
	if w == nil {
		panic("no standard input configured")
	}
	w.Close()
}
// SendText writes the given text, followed by a newline, to the command's
// standard input. Panics if stdin was not configured via [Runner.WithStdin]
// or if the write fails (e.g. because the command already closed its input).
func (r *Runner) SendText(text string) {
	if r.stdinW == nil {
		panic("no standard input configured")
	}
	// The panic message previously read "Failed to to write to t.stdinW":
	// it had a duplicated word, referenced a stale field name, and dropped
	// the underlying error. Include the error so failures are diagnosable.
	_, err := r.stdinW.Write([]byte(text + "\n"))
	if err != nil {
		panic("failed to write to stdin pipe: " + err.Error())
	}
}
// RunBackground starts the CLI command in a background goroutine.
// It wires up stdout/stderr pipes (teed into r.stdout/r.stderr and exposed
// line-by-line via r.StdoutLines/r.StderrLines), registers cleanup that
// cancels the command context on test completion, and publishes the
// command's final error on r.errch.
func (r *Runner) RunBackground() {
	var stdoutR, stderrR io.Reader
	var stdoutW, stderrW io.WriteCloser
	stdoutR, stdoutW = io.Pipe()
	stderrR, stderrW = io.Pipe()
	// Route cmdio logger output to the stderr pipe so it is captured too.
	ctx := cmdio.NewContext(r.ctx, &cmdio.Logger{
		Mode:   flags.ModeAppend,
		Reader: bufio.Reader{},
		Writer: stderrW,
	})

	cli := cmd.New(ctx)
	cli.SetOut(stdoutW)
	cli.SetErr(stderrW)
	cli.SetArgs(r.args)
	if r.stdinW != nil {
		cli.SetIn(r.stdinR)
	}

	// Register cleanup function to restore flags to their original values
	// once test has been executed. This is needed because flag values reside
	// in a global singleton data-structure, and thus subsequent tests might
	// otherwise interfere with each other
	r.registerFlagCleanup(cli)

	errch := make(chan error)
	ctx, cancel := context.WithCancel(ctx)

	// Tee stdout/stderr to buffers.
	stdoutR = io.TeeReader(stdoutR, &r.stdout)
	stderrR = io.TeeReader(stderrR, &r.stderr)

	// Consume stdout/stderr line-by-line.
	var wg sync.WaitGroup
	r.StdoutLines = consumeLines(ctx, &wg, stdoutR)
	r.StderrLines = consumeLines(ctx, &wg, stderrR)

	// Run command in background.
	go func() {
		err := root.Execute(ctx, cli)
		if err != nil {
			r.Logf("Error running command: %s", err)
		}

		// Close pipes to signal EOF.
		stdoutW.Close()
		stderrW.Close()

		// Wait for the [consumeLines] routines to finish now that
		// the pipes they're reading from have closed.
		wg.Wait()

		if r.stdout.Len() > 0 {
			// Make a copy of the buffer such that it remains "unread".
			scanner := bufio.NewScanner(bytes.NewBuffer(r.stdout.Bytes()))
			for scanner.Scan() {
				r.Logf("[databricks stdout]: %s", scanner.Text())
			}
		}

		if r.stderr.Len() > 0 {
			// Make a copy of the buffer such that it remains "unread".
			scanner := bufio.NewScanner(bytes.NewBuffer(r.stderr.Bytes()))
			for scanner.Scan() {
				r.Logf("[databricks stderr]: %s", scanner.Text())
			}
		}

		// Reset context on command for the next test.
		// These commands are globals so we have to clean up to the best of our ability after each run.
		// See https://github.com/spf13/cobra/blob/a6f198b635c4b18fff81930c40d464904e55b161/command.go#L1062-L1066
		//nolint:staticcheck // cobra sets the context and doesn't clear it
		cli.SetContext(nil)

		// Make caller aware of error.
		errch <- err
		close(errch)
	}()

	// Ensure command terminates upon test completion (success or failure).
	r.Cleanup(func() {
		// Signal termination of command.
		cancel()

		// Wait for goroutine to finish.
		<-errch
	})

	r.errch = errch
}
// Run executes the command synchronously: it starts it in the background
// and blocks until it terminates, then returns the captured stdout and
// stderr buffers along with the command's error (nil on success).
func (r *Runner) Run() (bytes.Buffer, bytes.Buffer, error) {
	r.RunBackground()
	// Wait for completion before returning; the output buffers are only
	// guaranteed to be fully populated once the error channel yields.
	err := <-r.errch
	return r.stdout, r.stderr, err
}
// Like [require.Eventually] but errors if the underlying command has failed.
//
// The condition is evaluated in a fresh goroutine on every tick so a slow
// condition cannot block the select loop; at most one evaluation is in
// flight at a time (the ticker case is disabled while one is pending).
func (r *Runner) Eventually(condition func() bool, waitFor, tick time.Duration, msgAndArgs ...any) {
	ch := make(chan bool, 1)

	timer := time.NewTimer(waitFor)
	defer timer.Stop()

	ticker := time.NewTicker(tick)
	defer ticker.Stop()

	// Kick off condition check immediately.
	go func() { ch <- condition() }()

	for tick := ticker.C; ; {
		select {
		case err := <-r.errch:
			// The command terminated with an error before the condition held.
			require.Fail(r, "Command failed", err)
			return
		case <-timer.C:
			require.Fail(r, "Condition never satisfied", msgAndArgs...)
			return
		case <-tick:
			// Disable the ticker case until the in-flight check returns.
			tick = nil
			go func() { ch <- condition() }()
		case v := <-ch:
			if v {
				return
			}
			// Condition returned false: re-arm the ticker for the next poll.
			tick = ticker.C
		}
	}
}
// RunAndExpectOutput runs the command, requires that it succeeds, and
// requires that its trimmed standard output equals the given heredoc.
func (r *Runner) RunAndExpectOutput(heredoc string) {
	stdout, _, err := r.Run()
	require.NoError(r, err)
	actual := strings.TrimSpace(stdout.String())
	require.Equal(r, cmdio.Heredoc(heredoc), actual)
}
// RunAndParseJSON runs the command, requires that it succeeds, and
// unmarshals its standard output as JSON into v.
func (r *Runner) RunAndParseJSON(v any) {
	stdout, _, err := r.Run()
	require.NoError(r, err)
	require.NoError(r, json.Unmarshal(stdout.Bytes(), &v))
}
// NewRunner returns a runner for the given arguments that uses a
// background context.
func NewRunner(t testutil.TestingT, args ...string) *Runner {
	return NewRunnerWithContext(t, context.Background(), args...)
}
// NewRunnerWithContext returns a runner for the given arguments that uses
// the provided context when executing the command.
func NewRunnerWithContext(t testutil.TestingT, ctx context.Context, args ...string) *Runner {
	runner := Runner{
		TestingT: t,
		ctx:      ctx,
		args:     args,
	}
	return &runner
}
// RequireSuccessfulRun runs the CLI with the given arguments and requires
// that it succeeds. It returns the captured stdout and stderr buffers.
func RequireSuccessfulRun(t testutil.TestingT, args ...string) (bytes.Buffer, bytes.Buffer) {
	t.Logf("run args: [%s]", strings.Join(args, ", "))
	runner := NewRunner(t, args...)
	stdout, stderr, err := runner.Run()
	require.NoError(t, err)
	return stdout, stderr
}
// RequireErrorRun runs the CLI with the given arguments and requires that it
// fails. It returns the captured stdout and stderr buffers and the error for
// further assertions by the caller.
func RequireErrorRun(t testutil.TestingT, args ...string) (bytes.Buffer, bytes.Buffer, error) {
	// Log arguments for parity with [RequireSuccessfulRun] to aid debugging.
	t.Logf("run args: [%s]", strings.Join(args, ", "))
	r := NewRunner(t, args...)
	stdout, stderr, err := r.Run()
	require.Error(t, err)
	return stdout, stderr, err
}

View File

@ -3,11 +3,12 @@ package internal
import ( import (
"testing" "testing"
"github.com/databricks/cli/internal/testcli"
assert "github.com/databricks/cli/libs/dyn/dynassert" assert "github.com/databricks/cli/libs/dyn/dynassert"
) )
func TestUnknownCommand(t *testing.T) { func TestUnknownCommand(t *testing.T) {
stdout, stderr, err := RequireErrorRun(t, "unknown-command") stdout, stderr, err := testcli.RequireErrorRun(t, "unknown-command")
assert.Error(t, err, "unknown command", `unknown command "unknown-command" for "databricks"`) assert.Error(t, err, "unknown command", `unknown command "unknown-command" for "databricks"`)
assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stdout.String())

View File

@ -6,31 +6,32 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/build" "github.com/databricks/cli/internal/build"
"github.com/databricks/cli/internal/testcli"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
var expectedVersion = fmt.Sprintf("Databricks CLI v%s\n", build.GetInfo().Version) var expectedVersion = fmt.Sprintf("Databricks CLI v%s\n", build.GetInfo().Version)
func TestVersionFlagShort(t *testing.T) { func TestVersionFlagShort(t *testing.T) {
stdout, stderr := RequireSuccessfulRun(t, "-v") stdout, stderr := testcli.RequireSuccessfulRun(t, "-v")
assert.Equal(t, expectedVersion, stdout.String()) assert.Equal(t, expectedVersion, stdout.String())
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
} }
func TestVersionFlagLong(t *testing.T) { func TestVersionFlagLong(t *testing.T) {
stdout, stderr := RequireSuccessfulRun(t, "--version") stdout, stderr := testcli.RequireSuccessfulRun(t, "--version")
assert.Equal(t, expectedVersion, stdout.String()) assert.Equal(t, expectedVersion, stdout.String())
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
} }
func TestVersionCommand(t *testing.T) { func TestVersionCommand(t *testing.T) {
stdout, stderr := RequireSuccessfulRun(t, "version") stdout, stderr := testcli.RequireSuccessfulRun(t, "version")
assert.Equal(t, expectedVersion, stdout.String()) assert.Equal(t, expectedVersion, stdout.String())
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
} }
func TestVersionCommandWithJSONOutput(t *testing.T) { func TestVersionCommandWithJSONOutput(t *testing.T) {
stdout, stderr := RequireSuccessfulRun(t, "version", "--output", "json") stdout, stderr := testcli.RequireSuccessfulRun(t, "version", "--output", "json")
assert.NotEmpty(t, stdout.String()) assert.NotEmpty(t, stdout.String())
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())

View File

@ -12,6 +12,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@ -23,7 +24,7 @@ import (
func TestAccWorkspaceList(t *testing.T) { func TestAccWorkspaceList(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := RequireSuccessfulRun(t, "workspace", "list", "/") stdout, stderr := testcli.RequireSuccessfulRun(t, "workspace", "list", "/")
outStr := stdout.String() outStr := stdout.String()
assert.Contains(t, outStr, "ID") assert.Contains(t, outStr, "ID")
assert.Contains(t, outStr, "Type") assert.Contains(t, outStr, "Type")
@ -33,12 +34,12 @@ func TestAccWorkspaceList(t *testing.T) {
} }
func TestWorkpaceListErrorWhenNoArguments(t *testing.T) { func TestWorkpaceListErrorWhenNoArguments(t *testing.T) {
_, _, err := RequireErrorRun(t, "workspace", "list") _, _, err := testcli.RequireErrorRun(t, "workspace", "list")
assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0")
} }
func TestWorkpaceGetStatusErrorWhenNoArguments(t *testing.T) { func TestWorkpaceGetStatusErrorWhenNoArguments(t *testing.T) {
_, _, err := RequireErrorRun(t, "workspace", "get-status") _, _, err := testcli.RequireErrorRun(t, "workspace", "get-status")
assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0")
} }
@ -57,7 +58,7 @@ func TestAccWorkpaceExportPrintsContents(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
// Run export // Run export
stdout, stderr := RequireSuccessfulRun(t, "workspace", "export", path.Join(tmpdir, "file-a")) stdout, stderr := testcli.RequireSuccessfulRun(t, "workspace", "export", path.Join(tmpdir, "file-a"))
assert.Equal(t, contents, stdout.String()) assert.Equal(t, contents, stdout.String())
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
} }
@ -125,7 +126,7 @@ func TestAccExportDir(t *testing.T) {
}, "\n") }, "\n")
// Run Export // Run Export
stdout, stderr := RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir) stdout, stderr := testcli.RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir)
assert.Equal(t, expectedLogs, stdout.String()) assert.Equal(t, expectedLogs, stdout.String())
assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stderr.String())
@ -153,7 +154,7 @@ func TestAccExportDirDoesNotOverwrite(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
// Run Export // Run Export
RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir) testcli.RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir)
// Assert file is not overwritten // Assert file is not overwritten
assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "local content") assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "local content")
@ -174,7 +175,7 @@ func TestAccExportDirWithOverwriteFlag(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
// Run Export // Run Export
RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir, "--overwrite") testcli.RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir, "--overwrite")
// Assert file has been overwritten // Assert file has been overwritten
assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "content from workspace") assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "content from workspace")
@ -182,7 +183,7 @@ func TestAccExportDirWithOverwriteFlag(t *testing.T) {
func TestAccImportDir(t *testing.T) { func TestAccImportDir(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
stdout, stderr := RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--log-level=debug") stdout, stderr := testcli.RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--log-level=debug")
expectedLogs := strings.Join([]string{ expectedLogs := strings.Join([]string{
fmt.Sprintf("Importing files from %s", "./testdata/import_dir"), fmt.Sprintf("Importing files from %s", "./testdata/import_dir"),
@ -223,7 +224,7 @@ func TestAccImportDirDoesNotOverwrite(t *testing.T) {
assertFilerFileContents(t, ctx, workspaceFiler, "file-a", "old file") assertFilerFileContents(t, ctx, workspaceFiler, "file-a", "old file")
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")") assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")")
RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir) testcli.RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir)
// Assert files are imported // Assert files are imported
assertFilerFileContents(t, ctx, workspaceFiler, "a/b/c/file-b", "file-in-dir") assertFilerFileContents(t, ctx, workspaceFiler, "a/b/c/file-b", "file-in-dir")
@ -251,7 +252,7 @@ func TestAccImportDirWithOverwriteFlag(t *testing.T) {
assertFilerFileContents(t, ctx, workspaceFiler, "file-a", "old file") assertFilerFileContents(t, ctx, workspaceFiler, "file-a", "old file")
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")") assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")")
RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--overwrite") testcli.RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--overwrite")
// Assert files are imported // Assert files are imported
assertFilerFileContents(t, ctx, workspaceFiler, "a/b/c/file-b", "file-in-dir") assertFilerFileContents(t, ctx, workspaceFiler, "a/b/c/file-b", "file-in-dir")
@ -273,7 +274,7 @@ func TestAccExport(t *testing.T) {
// Export vanilla file // Export vanilla file
err = f.Write(ctx, "file-a", strings.NewReader("abc")) err = f.Write(ctx, "file-a", strings.NewReader("abc"))
require.NoError(t, err) require.NoError(t, err)
stdout, _ := RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "file-a")) stdout, _ := testcli.RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "file-a"))
b, err := io.ReadAll(&stdout) b, err := io.ReadAll(&stdout)
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, "abc", string(b)) assert.Equal(t, "abc", string(b))
@ -281,13 +282,13 @@ func TestAccExport(t *testing.T) {
// Export python notebook // Export python notebook
err = f.Write(ctx, "pyNotebook.py", strings.NewReader("# Databricks notebook source")) err = f.Write(ctx, "pyNotebook.py", strings.NewReader("# Databricks notebook source"))
require.NoError(t, err) require.NoError(t, err)
stdout, _ = RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook")) stdout, _ = testcli.RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"))
b, err = io.ReadAll(&stdout) b, err = io.ReadAll(&stdout)
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, "# Databricks notebook source\n", string(b)) assert.Equal(t, "# Databricks notebook source\n", string(b))
// Export python notebook as jupyter // Export python notebook as jupyter
stdout, _ = RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--format", "JUPYTER") stdout, _ = testcli.RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--format", "JUPYTER")
b, err = io.ReadAll(&stdout) b, err = io.ReadAll(&stdout)
require.NoError(t, err) require.NoError(t, err)
assert.Contains(t, string(b), `"cells":`, "jupyter notebooks contain the cells field") assert.Contains(t, string(b), `"cells":`, "jupyter notebooks contain the cells field")
@ -303,7 +304,7 @@ func TestAccExportWithFileFlag(t *testing.T) {
// Export vanilla file // Export vanilla file
err = f.Write(ctx, "file-a", strings.NewReader("abc")) err = f.Write(ctx, "file-a", strings.NewReader("abc"))
require.NoError(t, err) require.NoError(t, err)
stdout, _ := RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "file-a"), "--file", filepath.Join(localTmpDir, "file.txt")) stdout, _ := testcli.RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "file-a"), "--file", filepath.Join(localTmpDir, "file.txt"))
b, err := io.ReadAll(&stdout) b, err := io.ReadAll(&stdout)
require.NoError(t, err) require.NoError(t, err)
// Expect nothing to be printed to stdout // Expect nothing to be printed to stdout
@ -313,14 +314,14 @@ func TestAccExportWithFileFlag(t *testing.T) {
// Export python notebook // Export python notebook
err = f.Write(ctx, "pyNotebook.py", strings.NewReader("# Databricks notebook source")) err = f.Write(ctx, "pyNotebook.py", strings.NewReader("# Databricks notebook source"))
require.NoError(t, err) require.NoError(t, err)
stdout, _ = RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--file", filepath.Join(localTmpDir, "pyNb.py")) stdout, _ = testcli.RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--file", filepath.Join(localTmpDir, "pyNb.py"))
b, err = io.ReadAll(&stdout) b, err = io.ReadAll(&stdout)
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, "", string(b)) assert.Equal(t, "", string(b))
assertLocalFileContents(t, filepath.Join(localTmpDir, "pyNb.py"), "# Databricks notebook source\n") assertLocalFileContents(t, filepath.Join(localTmpDir, "pyNb.py"), "# Databricks notebook source\n")
// Export python notebook as jupyter // Export python notebook as jupyter
stdout, _ = RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--format", "JUPYTER", "--file", filepath.Join(localTmpDir, "jupyterNb.ipynb")) stdout, _ = testcli.RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--format", "JUPYTER", "--file", filepath.Join(localTmpDir, "jupyterNb.ipynb"))
b, err = io.ReadAll(&stdout) b, err = io.ReadAll(&stdout)
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, "", string(b)) assert.Equal(t, "", string(b))
@ -332,13 +333,13 @@ func TestAccImportFileUsingContentFormatSource(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Content = `print(1)`. Uploaded as a notebook by default // Content = `print(1)`. Uploaded as a notebook by default
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyScript"), testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyScript"),
"--content", base64.StdEncoding.EncodeToString([]byte("print(1)")), "--language=PYTHON") "--content", base64.StdEncoding.EncodeToString([]byte("print(1)")), "--language=PYTHON")
assertFilerFileContents(t, ctx, workspaceFiler, "pyScript", "print(1)") assertFilerFileContents(t, ctx, workspaceFiler, "pyScript", "print(1)")
assertWorkspaceFileType(t, ctx, workspaceFiler, "pyScript", workspace.ObjectTypeNotebook) assertWorkspaceFileType(t, ctx, workspaceFiler, "pyScript", workspace.ObjectTypeNotebook)
// Import with content = `# Databricks notebook source\nprint(1)`. Uploaded as a notebook with the content just being print(1) // Import with content = `# Databricks notebook source\nprint(1)`. Uploaded as a notebook with the content just being print(1)
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyNb"), testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyNb"),
"--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")), "--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")),
"--language=PYTHON") "--language=PYTHON")
assertFilerFileContents(t, ctx, workspaceFiler, "pyNb", "print(1)") assertFilerFileContents(t, ctx, workspaceFiler, "pyNb", "print(1)")
@ -349,19 +350,19 @@ func TestAccImportFileUsingContentFormatAuto(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Content = `# Databricks notebook source\nprint(1)`. Upload as file if path has no extension. // Content = `# Databricks notebook source\nprint(1)`. Upload as file if path has no extension.
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-file"), testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-file"),
"--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")), "--format=AUTO") "--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")), "--format=AUTO")
assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "# Databricks notebook source\nprint(1)") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "# Databricks notebook source\nprint(1)")
assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-file", workspace.ObjectTypeFile) assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-file", workspace.ObjectTypeFile)
// Content = `# Databricks notebook source\nprint(1)`. Upload as notebook if path has py extension // Content = `# Databricks notebook source\nprint(1)`. Upload as notebook if path has py extension
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-notebook.py"), testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-notebook.py"),
"--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")), "--format=AUTO") "--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")), "--format=AUTO")
assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-notebook", "# Databricks notebook source\nprint(1)") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-notebook", "# Databricks notebook source\nprint(1)")
assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-notebook", workspace.ObjectTypeNotebook) assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-notebook", workspace.ObjectTypeNotebook)
// Content = `print(1)`. Upload as file if content is not notebook (even if path has .py extension) // Content = `print(1)`. Upload as file if content is not notebook (even if path has .py extension)
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "not-a-notebook.py"), "--content", testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "not-a-notebook.py"), "--content",
base64.StdEncoding.EncodeToString([]byte("print(1)")), "--format=AUTO") base64.StdEncoding.EncodeToString([]byte("print(1)")), "--format=AUTO")
assertFilerFileContents(t, ctx, workspaceFiler, "not-a-notebook.py", "print(1)") assertFilerFileContents(t, ctx, workspaceFiler, "not-a-notebook.py", "print(1)")
assertWorkspaceFileType(t, ctx, workspaceFiler, "not-a-notebook.py", workspace.ObjectTypeFile) assertWorkspaceFileType(t, ctx, workspaceFiler, "not-a-notebook.py", workspace.ObjectTypeFile)
@ -369,15 +370,15 @@ func TestAccImportFileUsingContentFormatAuto(t *testing.T) {
func TestAccImportFileFormatSource(t *testing.T) { func TestAccImportFileFormatSource(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyNotebook"), "--file", "./testdata/import_dir/pyNotebook.py", "--language=PYTHON") testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyNotebook"), "--file", "./testdata/import_dir/pyNotebook.py", "--language=PYTHON")
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"python\")") assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"python\")")
assertWorkspaceFileType(t, ctx, workspaceFiler, "pyNotebook", workspace.ObjectTypeNotebook) assertWorkspaceFileType(t, ctx, workspaceFiler, "pyNotebook", workspace.ObjectTypeNotebook)
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "scalaNotebook"), "--file", "./testdata/import_dir/scalaNotebook.scala", "--language=SCALA") testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "scalaNotebook"), "--file", "./testdata/import_dir/scalaNotebook.scala", "--language=SCALA")
assertFilerFileContents(t, ctx, workspaceFiler, "scalaNotebook", "// Databricks notebook source\nprintln(\"scala\")") assertFilerFileContents(t, ctx, workspaceFiler, "scalaNotebook", "// Databricks notebook source\nprintln(\"scala\")")
assertWorkspaceFileType(t, ctx, workspaceFiler, "scalaNotebook", workspace.ObjectTypeNotebook) assertWorkspaceFileType(t, ctx, workspaceFiler, "scalaNotebook", workspace.ObjectTypeNotebook)
_, _, err := RequireErrorRun(t, "workspace", "import", path.Join(targetDir, "scalaNotebook"), "--file", "./testdata/import_dir/scalaNotebook.scala") _, _, err := testcli.RequireErrorRun(t, "workspace", "import", path.Join(targetDir, "scalaNotebook"), "--file", "./testdata/import_dir/scalaNotebook.scala")
assert.ErrorContains(t, err, "The zip file may not be valid or may be an unsupported version. Hint: Objects imported using format=SOURCE are expected to be zip encoded databricks source notebook(s) by default. Please specify a language using the --language flag if you are trying to import a single uncompressed notebook") assert.ErrorContains(t, err, "The zip file may not be valid or may be an unsupported version. Hint: Objects imported using format=SOURCE are expected to be zip encoded databricks source notebook(s) by default. Please specify a language using the --language flag if you are trying to import a single uncompressed notebook")
} }
@ -385,18 +386,18 @@ func TestAccImportFileFormatAuto(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Upload as file if path has no extension // Upload as file if path has no extension
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-file"), "--file", "./testdata/import_dir/pyNotebook.py", "--format=AUTO") testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-file"), "--file", "./testdata/import_dir/pyNotebook.py", "--format=AUTO")
assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "# Databricks notebook source") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "# Databricks notebook source")
assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "print(\"python\")") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "print(\"python\")")
assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-file", workspace.ObjectTypeFile) assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-file", workspace.ObjectTypeFile)
// Upload as notebook if path has extension // Upload as notebook if path has extension
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-notebook.py"), "--file", "./testdata/import_dir/pyNotebook.py", "--format=AUTO") testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-notebook.py"), "--file", "./testdata/import_dir/pyNotebook.py", "--format=AUTO")
assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-notebook", "# Databricks notebook source\nprint(\"python\")") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-notebook", "# Databricks notebook source\nprint(\"python\")")
assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-notebook", workspace.ObjectTypeNotebook) assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-notebook", workspace.ObjectTypeNotebook)
// Upload as file if content is not notebook (even if path has .py extension) // Upload as file if content is not notebook (even if path has .py extension)
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "not-a-notebook.py"), "--file", "./testdata/import_dir/file-a", "--format=AUTO") testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "not-a-notebook.py"), "--file", "./testdata/import_dir/file-a", "--format=AUTO")
assertFilerFileContents(t, ctx, workspaceFiler, "not-a-notebook.py", "hello, world") assertFilerFileContents(t, ctx, workspaceFiler, "not-a-notebook.py", "hello, world")
assertWorkspaceFileType(t, ctx, workspaceFiler, "not-a-notebook.py", workspace.ObjectTypeFile) assertWorkspaceFileType(t, ctx, workspaceFiler, "not-a-notebook.py", workspace.ObjectTypeFile)
} }