Debug filer tests

Shreyas Goenka 2024-12-05 23:55:28 +05:30
parent 0ad790e468
commit ea3dcc5f3a
No known key found for this signature in database
GPG Key ID: 92A07DF49CCB0622
45 changed files with 148 additions and 148 deletions
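
The diff below drops the TestAcc prefix from the integration test functions across the repository, leaving plain Test names. The renamed tests still guard themselves against running outside a configured cloud workspace. A minimal sketch of that guard pattern, assuming a helper shaped like the GetEnvOrSkipTest calls visible in the hunks below (the names here are illustrative, not the repository's actual code):

package internal

import (
	"os"
	"testing"
)

// getEnvOrSkipTest skips the calling test unless the named environment
// variable is set, and returns its value otherwise.
func getEnvOrSkipTest(t *testing.T, name string) string {
	value := os.Getenv(name)
	if value == "" {
		t.Skipf("environment variable %s is not set", name)
	}
	return value
}

// TestExampleCloudOnly shows the post-rename shape: no TestAcc prefix, with
// the CLOUD_ENV guard keeping it from running outside an integration
// environment.
func TestExampleCloudOnly(t *testing.T) {
	env := getEnvOrSkipTest(t, "CLOUD_ENV")
	t.Log(env)
	// ... a real test would invoke the CLI against the workspace here ...
}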

View File

@@ -123,7 +123,7 @@ Internal:
* Remove unused `IS_OWNER` constant ([#1823](https://github.com/databricks/cli/pull/1823)).
* Assert SDK version is consistent in the CLI generation process ([#1814](https://github.com/databricks/cli/pull/1814)).
* Fixed unmarshalling json input into `interface{}` type ([#1832](https://github.com/databricks/cli/pull/1832)).
-* Fix `TestAccFsMkdirWhenFileExistsAtPath` in isolated Azure environments ([#1833](https://github.com/databricks/cli/pull/1833)).
+* Fix `TestFsMkdirWhenFileExistsAtPath` in isolated Azure environments ([#1833](https://github.com/databricks/cli/pull/1833)).
* Add behavioral tests for examples from the YAML spec ([#1835](https://github.com/databricks/cli/pull/1835)).
* Remove Terraform conversion function that's no longer used ([#1840](https://github.com/databricks/cli/pull/1840)).
* Encode assumptions about the dashboards API in a test ([#1839](https://github.com/databricks/cli/pull/1839)).
@@ -1680,7 +1680,7 @@ Internal:
* Remove dependency on global state for the root command ([#606](https://github.com/databricks/cli/pull/606)).
* Add merge_group trigger for build ([#612](https://github.com/databricks/cli/pull/612)).
* Added support for build command chaining and error on missing wheel ([#607](https://github.com/databricks/cli/pull/607)).
-* Add TestAcc prefix to filer test and fix any failing tests ([#611](https://github.com/databricks/cli/pull/611)).
+* Add Test prefix to filer test and fix any failing tests ([#611](https://github.com/databricks/cli/pull/611)).
* Add url parse helper function for templates ([#600](https://github.com/databricks/cli/pull/600)).
* Remove dependency on global state for remaining commands ([#613](https://github.com/databricks/cli/pull/613)).
* Update CHANGELOG template ([#588](https://github.com/databricks/cli/pull/588)).

View File

@@ -65,7 +65,7 @@ func expectReturns(t *testing.T, fn promptFn, config *config.Config) {
require.NotNil(t, client)
}
-func TestAccountClientOrPrompt(t *testing.T) {
+func TestountClientOrPrompt(t *testing.T) {
testutil.CleanupEnvironment(t)
dir := t.TempDir()

View File

@@ -6,7 +6,7 @@ import (
"github.com/stretchr/testify/assert"
)
-func TestAccAlertsCreateErrWhenNoArguments(t *testing.T) {
+func TestAlertsCreateErrWhenNoArguments(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "alerts-legacy", "create")

View File

@@ -12,7 +12,7 @@ import (
_ "github.com/databricks/cli/cmd/api"
)
-func TestAccApiGet(t *testing.T) {
+func TestApiGet(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me")
@@ -27,7 +27,7 @@ func TestAccApiGet(t *testing.T) {
assert.NotNil(t, out["id"])
}
-func TestAccApiPost(t *testing.T) {
+func TestApiPost(t *testing.T) {
env := GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
if env == "gcp" {

View File

@@ -30,7 +30,7 @@ func touchEmptyFile(t *testing.T, path string) {
f.Close()
}
-func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
+func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
dir := t.TempDir()
@@ -95,7 +95,7 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
)
}
-func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) {
+func TestUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
dir := t.TempDir()
@@ -160,7 +160,7 @@ func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T)
)
}
-func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
+func TestUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
@@ -230,7 +230,7 @@ func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
)
}
-func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
+func TestUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
@@ -267,7 +267,7 @@ func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
assert.Equal(t, "", stderr.String())
}
-func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
+func TestUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W

View File

@@ -12,7 +12,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {
+func TestBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {
ctx, _ := acc.WorkspaceTest(t)
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))

View File

@@ -15,7 +15,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccBindJobToExistingJob(t *testing.T) {
+func TestBindJobToExistingJob(t *testing.T) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
@@ -80,7 +80,7 @@ func TestAccBindJobToExistingJob(t *testing.T) {
require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py")
}
-func TestAccAbortBind(t *testing.T) {
+func TestAbortBind(t *testing.T) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
@@ -128,7 +128,7 @@ func TestAccAbortBind(t *testing.T) {
require.Contains(t, job.Settings.Tasks[0].NotebookTask.NotebookPath, "test")
}
-func TestAccGenerateAndBind(t *testing.T) {
+func TestGenerateAndBind(t *testing.T) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)

View File

@@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccDeployBundleWithCluster(t *testing.T) {
+func TestDeployBundleWithCluster(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
if testutil.IsAWSCloud(wt.T) {

View File

@@ -12,7 +12,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccDashboards(t *testing.T) {
+func TestDashboards(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
warehouseID := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")

View File

@@ -80,7 +80,7 @@ func setupUcSchemaBundle(t *testing.T, ctx context.Context, w *databricks.Worksp
return bundleRoot
}
-func TestAccBundleDeployUcSchema(t *testing.T) {
+func TestBundleDeployUcSchema(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
@@ -105,7 +105,7 @@ func TestAccBundleDeployUcSchema(t *testing.T) {
assert.Equal(t, "SCHEMA_DOES_NOT_EXIST", apiErr.ErrorCode)
}
-func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
+func TestBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
@@ -127,7 +127,7 @@ func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
}
-func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
+func TestBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
@@ -176,7 +176,7 @@ properties such as the 'catalog' or 'storage' are changed:
}
-func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
+func TestBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
uniqueId := uuid.New().String()
@@ -214,7 +214,7 @@ properties such as the 'catalog' or 'storage' are changed:
assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
}
-func TestAccDeployBasicBundleLogs(t *testing.T) {
+func TestDeployBasicBundleLogs(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
@@ -244,7 +244,7 @@ func TestAccDeployBasicBundleLogs(t *testing.T) {
assert.Equal(t, "", stdout)
}
-func TestAccDeployUcVolume(t *testing.T) {
+func TestDeployUcVolume(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W

View File

@@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccBundleDeployThenRemoveResources(t *testing.T) {
+func TestBundleDeployThenRemoveResources(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W

View File

@@ -11,7 +11,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccDeployBasicToSharedWorkspacePath(t *testing.T) {
+func TestDeployBasicToSharedWorkspacePath(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))

View File

@@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) {
+func TestFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)

View File

@@ -15,7 +15,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccBundleDestroy(t *testing.T) {
+func TestBundleDestroy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W

View File

@@ -11,7 +11,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccEmptyBundleDeploy(t *testing.T) {
+func TestEmptyBundleDeploy(t *testing.T) {
ctx, _ := acc.WorkspaceTest(t)
// create empty bundle

View File

@@ -8,7 +8,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccPythonWheelTaskWithEnvironmentsDeployAndRun(t *testing.T) {
+func TestPythonWheelTaskWithEnvironmentsDeployAndRun(t *testing.T) {
t.Skip("Skipping test until serveless is enabled")
ctx, _ := acc.WorkspaceTest(t)

View File

@@ -20,7 +20,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccGenerateFromExistingJobAndDeploy(t *testing.T) {
+func TestGenerateFromExistingJobAndDeploy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: t, w: wt.W}

View File

@@ -18,7 +18,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) {
+func TestGenerateFromExistingPipelineAndDeploy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
gt := &generatePipelineTest{T: t, w: wt.W}

View File

@@ -19,7 +19,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccJobsMetadataFile(t *testing.T) {
+func TestJobsMetadataFile(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W

View File

@@ -14,7 +14,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccLocalStateStaleness(t *testing.T) {
+func TestLocalStateStaleness(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W

View File

@@ -45,15 +45,15 @@ func runPythonWheelTest(t *testing.T, templateName string, sparkVersion string,
require.Contains(t, out, "['my_test_code', 'param1', 'param2']")
}
-func TestAccPythonWheelTaskDeployAndRunWithoutWrapper(t *testing.T) {
+func TestPythonWheelTaskDeployAndRunWithoutWrapper(t *testing.T) {
runPythonWheelTest(t, "python_wheel_task", "13.3.x-snapshot-scala2.12", false)
}
-func TestAccPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
+func TestPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
runPythonWheelTest(t, "python_wheel_task", "12.2.x-scala2.12", true)
}
-func TestAccPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
+func TestPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
_, wt := acc.WorkspaceTest(t)
if testutil.IsAWSCloud(wt.T) {

View File

@@ -52,7 +52,7 @@ func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) {
runSparkJarTestCommon(t, ctx, sparkVersion, "n/a")
}
-func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
+func TestSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
testutil.RequireJDK(t, context.Background(), "1.8.0")
@@ -76,7 +76,7 @@ func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
}
}
-func TestAccSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
+func TestSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
testutil.RequireJDK(t, context.Background(), "1.8.0")

View File

@@ -12,7 +12,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccBundleValidate(t *testing.T) {
+func TestBundleValidate(t *testing.T) {
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
tmpDir := t.TempDir()

View File

@@ -10,7 +10,7 @@ import (
var clusterId string
-func TestAccClustersList(t *testing.T) {
+func TestClustersList(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := RequireSuccessfulRun(t, "clusters", "list")
@@ -25,7 +25,7 @@ func TestAccClustersList(t *testing.T) {
assert.NotEmpty(t, clusterId)
}
-func TestAccClustersGet(t *testing.T) {
+func TestClustersGet(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := RequireSuccessfulRun(t, "clusters", "get", clusterId)

View File

@@ -17,7 +17,7 @@ func setupCompletionFile(t *testing.T, f filer.Filer) {
require.NoError(t, err)
}
-func TestAccFsCompletion(t *testing.T) {
+func TestFsCompletion(t *testing.T) {
f, tmpDir := setupDbfsFiler(t)
setupCompletionFile(t, f)

View File

@@ -18,7 +18,7 @@ import (
// Verify that importing a dashboard through the Workspace API retains the identity of the underying resource,
// as well as properties exclusively accessible through the dashboards API.
-func TestAccDashboardAssumptions_WorkspaceImport(t *testing.T) {
+func TestDashboardAssumptions_WorkspaceImport(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
t.Parallel()

View File

@@ -116,7 +116,7 @@ func commonFilerRecursiveDeleteTest(t *testing.T, ctx context.Context, f filer.F
assert.ErrorAs(t, err, &filer.NoSuchDirectoryError{})
}
-func TestAccFilerRecursiveDelete(t *testing.T) {
+func TestFilerRecursiveDelete(t *testing.T) {
t.Parallel()
for _, testCase := range []struct {
@@ -227,7 +227,7 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer)
assert.True(t, errors.Is(err, fs.ErrInvalid))
}
-func TestAccFilerReadWrite(t *testing.T) {
+func TestFilerReadWrite(t *testing.T) {
t.Parallel()
for _, testCase := range []struct {
@@ -336,7 +336,7 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) {
assert.False(t, entries[0].IsDir())
}
-func TestAccFilerReadDir(t *testing.T) {
+func TestFilerReadDir(t *testing.T) {
t.Parallel()
for _, testCase := range []struct {
@@ -361,7 +361,7 @@ func TestAccFilerReadDir(t *testing.T) {
}
}
-func TestAccFilerWorkspaceNotebook(t *testing.T) {
+func TestFilerWorkspaceNotebook(t *testing.T) {
t.Parallel()
ctx := context.Background()
@@ -471,7 +471,7 @@ func TestAccFilerWorkspaceNotebook(t *testing.T) {
}
-func TestAccFilerWorkspaceFilesExtensionsReadDir(t *testing.T) {
+func TestFilerWorkspaceFilesExtensionsReadDir(t *testing.T) {
t.Parallel()
files := []struct {
@@ -575,7 +575,7 @@ func setupFilerWithExtensionsTest(t *testing.T) filer.Filer {
return wf
}
-func TestAccFilerWorkspaceFilesExtensionsRead(t *testing.T) {
+func TestFilerWorkspaceFilesExtensionsRead(t *testing.T) {
t.Parallel()
ctx := context.Background()
@@ -612,7 +612,7 @@ func TestAccFilerWorkspaceFilesExtensionsRead(t *testing.T) {
assert.ErrorIs(t, err, fs.ErrNotExist)
}
-func TestAccFilerWorkspaceFilesExtensionsDelete(t *testing.T) {
+func TestFilerWorkspaceFilesExtensionsDelete(t *testing.T) {
t.Parallel()
ctx := context.Background()
@@ -661,7 +661,7 @@ func TestAccFilerWorkspaceFilesExtensionsDelete(t *testing.T) {
filerTest{t, wf}.assertNotExists(ctx, "dir")
}
-func TestAccFilerWorkspaceFilesExtensionsStat(t *testing.T) {
+func TestFilerWorkspaceFilesExtensionsStat(t *testing.T) {
t.Parallel()
ctx := context.Background()
@@ -708,7 +708,7 @@ func TestAccFilerWorkspaceFilesExtensionsStat(t *testing.T) {
}
}
-func TestAccWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) {
+func TestWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) {
t.Parallel()
ctx := context.Background()
@@ -723,7 +723,7 @@ func TestAccWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) {
assert.ErrorIs(t, err, fs.ErrNotExist)
}
-func TestAccWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) {
+func TestWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) {
t.Parallel()
ctx := context.Background()
@@ -742,7 +742,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) {
assert.NoError(t, err)
}
-func TestAccWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) {
+func TestWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) {
t.Parallel()
ctx := context.Background()
@@ -780,7 +780,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T)
assert.NoError(t, err)
}
-func TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) {
+func TestWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) {
t.Parallel()
// Case 1: Writing source notebooks.

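The fs and filer hunks above, and several that follow, share the same table-driven structure: a top-level t.Parallel(), a loop over filer setups, and a parallel subtest per entry. A minimal self-contained sketch of that pattern (the real tests range over fsTests and copyTests(), whose entries pair a name with a setup helper such as setupDbfsFiler; the stand-in setup functions below are illustrative only):

package internal

import "testing"

func TestFsExamplePattern(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		name  string
		setup func(t *testing.T) string
	}{
		// Stand-ins for the suite's real setup helpers, which provision a
		// filer against DBFS, workspace files, or UC volumes.
		{"dbfs", func(t *testing.T) string { return t.TempDir() }},
		{"workspace", func(t *testing.T) string { return t.TempDir() }},
	}

	for _, tc := range testCases {
		tc := tc // capture the loop variable so each parallel subtest sees its own copy
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()
			root := tc.setup(t)
			// ... the real tests run fs commands (cat, cp, ls, mkdir, rm)
			// against a path rooted here and assert on the output ...
			_ = root
		})
	}
}
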
View File

@@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccFsCat(t *testing.T) {
+func TestFsCat(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -33,7 +33,7 @@ func TestAccFsCat(t *testing.T) {
}
}
-func TestAccFsCatOnADir(t *testing.T) {
+func TestFsCatOnADir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -52,7 +52,7 @@ func TestAccFsCatOnADir(t *testing.T) {
}
}
-func TestAccFsCatOnNonExistentFile(t *testing.T) {
+func TestFsCatOnNonExistentFile(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -69,14 +69,14 @@ func TestAccFsCatOnNonExistentFile(t *testing.T) {
}
}
-func TestAccFsCatForDbfsInvalidScheme(t *testing.T) {
+func TestFsCatForDbfsInvalidScheme(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file")
assert.ErrorContains(t, err, "invalid scheme: dab")
}
-func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) {
+func TestFsCatDoesNotSupportOutputModeJson(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()

View File

@@ -120,7 +120,7 @@ func copyTests() []cpTest {
}
}
-func TestAccFsCpDir(t *testing.T) {
+func TestFsCpDir(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -140,7 +140,7 @@ func TestAccFsCpDir(t *testing.T) {
}
}
-func TestAccFsCpFileToFile(t *testing.T) {
+func TestFsCpFileToFile(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -160,7 +160,7 @@ func TestAccFsCpFileToFile(t *testing.T) {
}
}
-func TestAccFsCpFileToDir(t *testing.T) {
+func TestFsCpFileToDir(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -180,7 +180,7 @@ func TestAccFsCpFileToDir(t *testing.T) {
}
}
-func TestAccFsCpFileToDirForWindowsPaths(t *testing.T) {
+func TestFsCpFileToDirForWindowsPaths(t *testing.T) {
if runtime.GOOS != "windows" {
t.Skip("Skipping test on non-windows OS")
}
@@ -196,7 +196,7 @@ func TestAccFsCpFileToDirForWindowsPaths(t *testing.T) {
assertTargetFile(t, ctx, targetFiler, "foo.txt")
}
-func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) {
+func TestFsCpDirToDirFileNotOverwritten(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -221,7 +221,7 @@ func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) {
}
}
-func TestAccFsCpFileToDirFileNotOverwritten(t *testing.T) {
+func TestFsCpFileToDirFileNotOverwritten(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -244,7 +244,7 @@ func TestAccFsCpFileToDirFileNotOverwritten(t *testing.T) {
}
}
-func TestAccFsCpFileToFileFileNotOverwritten(t *testing.T) {
+func TestFsCpFileToFileFileNotOverwritten(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -267,7 +267,7 @@ func TestAccFsCpFileToFileFileNotOverwritten(t *testing.T) {
}
}
-func TestAccFsCpDirToDirWithOverwriteFlag(t *testing.T) {
+func TestFsCpDirToDirWithOverwriteFlag(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -290,7 +290,7 @@ func TestAccFsCpDirToDirWithOverwriteFlag(t *testing.T) {
}
}
-func TestAccFsCpFileToFileWithOverwriteFlag(t *testing.T) {
+func TestFsCpFileToFileWithOverwriteFlag(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -313,7 +313,7 @@ func TestAccFsCpFileToFileWithOverwriteFlag(t *testing.T) {
}
}
-func TestAccFsCpFileToDirWithOverwriteFlag(t *testing.T) {
+func TestFsCpFileToDirWithOverwriteFlag(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@@ -336,7 +336,7 @@ func TestAccFsCpFileToDirWithOverwriteFlag(t *testing.T) {
}
}
-func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
+func TestFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -354,14 +354,14 @@ func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
}
}
-func TestAccFsCpErrorsOnInvalidScheme(t *testing.T) {
+func TestFsCpErrorsOnInvalidScheme(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b")
assert.Equal(t, "invalid scheme: https", err.Error())
}
-func TestAccFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) {
+func TestFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {

View File

@@ -38,7 +38,7 @@ func setupLsFiles(t *testing.T, f filer.Filer) {
require.NoError(t, err)
}
-func TestAccFsLs(t *testing.T) {
+func TestFsLs(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -71,7 +71,7 @@ func TestAccFsLs(t *testing.T) {
}
}
-func TestAccFsLsWithAbsolutePaths(t *testing.T) {
+func TestFsLsWithAbsolutePaths(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -104,7 +104,7 @@ func TestAccFsLsWithAbsolutePaths(t *testing.T) {
}
}
-func TestAccFsLsOnFile(t *testing.T) {
+func TestFsLsOnFile(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -122,7 +122,7 @@ func TestAccFsLsOnFile(t *testing.T) {
}
}
-func TestAccFsLsOnEmptyDir(t *testing.T) {
+func TestFsLsOnEmptyDir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -145,7 +145,7 @@ func TestAccFsLsOnEmptyDir(t *testing.T) {
}
}
-func TestAccFsLsForNonexistingDir(t *testing.T) {
+func TestFsLsForNonexistingDir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -163,7 +163,7 @@ func TestAccFsLsForNonexistingDir(t *testing.T) {
}
}
-func TestAccFsLsWithoutScheme(t *testing.T) {
+func TestFsLsWithoutScheme(t *testing.T) {
t.Parallel()
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))

View File

@@ -12,7 +12,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccFsMkdir(t *testing.T) {
+func TestFsMkdir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -37,7 +37,7 @@ func TestAccFsMkdir(t *testing.T) {
}
}
-func TestAccFsMkdirCreatesIntermediateDirectories(t *testing.T) {
+func TestFsMkdirCreatesIntermediateDirectories(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -74,7 +74,7 @@ func TestAccFsMkdirCreatesIntermediateDirectories(t *testing.T) {
}
}
-func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) {
+func TestFsMkdirWhenDirectoryAlreadyExists(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -97,7 +97,7 @@ func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) {
}
}
-func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) {
+func TestFsMkdirWhenFileExistsAtPath(t *testing.T) {
t.Parallel()
t.Run("dbfs", func(t *testing.T) {

View File

@@ -12,7 +12,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccFsRmFile(t *testing.T) {
+func TestFsRmFile(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -42,7 +42,7 @@ func TestAccFsRmFile(t *testing.T) {
}
}
-func TestAccFsRmEmptyDir(t *testing.T) {
+func TestFsRmEmptyDir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -72,7 +72,7 @@ func TestAccFsRmEmptyDir(t *testing.T) {
}
}
-func TestAccFsRmNonEmptyDirectory(t *testing.T) {
+func TestFsRmNonEmptyDirectory(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -102,7 +102,7 @@ func TestAccFsRmNonEmptyDirectory(t *testing.T) {
}
}
-func TestAccFsRmForNonExistentFile(t *testing.T) {
+func TestFsRmForNonExistentFile(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@@ -121,7 +121,7 @@ func TestAccFsRmForNonExistentFile(t *testing.T) {
}
-func TestAccFsRmDirRecursively(t *testing.T) {
+func TestFsRmDirRecursively(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {

View File

@@ -10,7 +10,7 @@ import (
"github.com/stretchr/testify/assert"
)
-func TestAccGitClone(t *testing.T) {
+func TestGitClone(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
@@ -32,7 +32,7 @@ func TestAccGitClone(t *testing.T) {
assert.Contains(t, string(b), "ide")
}
-func TestAccGitCloneOnNonDefaultBranch(t *testing.T) {
+func TestGitCloneOnNonDefaultBranch(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
@@ -53,7 +53,7 @@ func TestAccGitCloneOnNonDefaultBranch(t *testing.T) {
assert.Contains(t, string(b), "dais-2022")
}
-func TestAccGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) {
+func TestGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()

View File

@@ -35,7 +35,7 @@ func assertSparseGitInfo(t *testing.T, expectedRoot string, info git.RepositoryI
assert.Equal(t, expectedRoot, info.WorktreeRoot)
}
-func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
+func TestFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
me, err := wt.W.CurrentUser.Me(ctx)
require.NoError(t, err)
@@ -62,7 +62,7 @@ func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
}
}
-func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
+func TestFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
me, err := wt.W.CurrentUser.Me(ctx)
require.NoError(t, err)
@@ -108,7 +108,7 @@ func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
}
}
-func TestAccFetchRepositoryInfoDotGit_FromGitRepo(t *testing.T) {
+func TestFetchRepositoryInfoDotGit_FromGitRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
repo := cloneRepoLocally(t, examplesRepoUrl)
@@ -135,7 +135,7 @@ func cloneRepoLocally(t *testing.T, repoUrl string) string {
return localRoot
}
-func TestAccFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) {
+func TestFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
tempDir := t.TempDir()
@@ -157,7 +157,7 @@ func TestAccFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) {
}
}
-func TestAccFetchRepositoryInfoDotGit_FromBrokenGitRepo(t *testing.T) {
+func TestFetchRepositoryInfoDotGit_FromBrokenGitRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
tempDir := t.TempDir()

View File

@@ -18,7 +18,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccBundleInitErrorOnUnknownFields(t *testing.T) {
+func TestBundleInitErrorOnUnknownFields(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
@@ -38,7 +38,7 @@ func TestAccBundleInitErrorOnUnknownFields(t *testing.T) {
// 2. While rare and to be avoided if possible, the CLI reserves the right to
// make changes that can break the MLOps Stacks DAB. In which case we should
// skip this test until the MLOps Stacks DAB is updated to work again.
-func TestAccBundleInitOnMlopsStacks(t *testing.T) {
+func TestBundleInitOnMlopsStacks(t *testing.T) {
t.Parallel()
env := testutil.GetCloud(t).String()
@@ -100,7 +100,7 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) {
assert.Contains(t, job.Settings.Name, fmt.Sprintf("dev-%s-batch-inference-job", projectName))
}
-func TestAccBundleInitHelpers(t *testing.T) {
+func TestBundleInitHelpers(t *testing.T) {
env := GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)

View File

@@ -10,7 +10,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccCreateJob(t *testing.T) {
+func TestCreateJob(t *testing.T) {
acc.WorkspaceTest(t)
env := GetEnvOrSkipTest(t, "CLOUD_ENV")
if env != "azure" {

View File

@@ -43,7 +43,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr
return remoteProjectRoot
}
-func TestAccLock(t *testing.T) {
+func TestLock(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.TODO()
wsc, err := databricks.NewWorkspaceClient()
@@ -180,7 +180,7 @@ func setupLockerTest(ctx context.Context, t *testing.T) (*lockpkg.Locker, filer.
return locker, f
}
-func TestAccLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) {
+func TestLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) {
ctx := context.Background()
locker, f := setupLockerTest(ctx, t)
var err error
@@ -202,7 +202,7 @@ func TestAccLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) {
assert.ErrorIs(t, err, fs.ErrNotExist)
}
-func TestAccLockUnlockWithAllowsLockFileNotExist(t *testing.T) {
+func TestLockUnlockWithAllowsLockFileNotExist(t *testing.T) {
ctx := context.Background()
locker, f := setupLockerTest(ctx, t)
var err error

View File

@@ -73,7 +73,7 @@ var sparkVersions = []string{
"14.1.x-scala2.12",
}
-func TestAccRunPythonTaskWorkspace(t *testing.T) {
+func TestRunPythonTaskWorkspace(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
@@ -94,7 +94,7 @@ func TestAccRunPythonTaskWorkspace(t *testing.T) {
})
}
-func TestAccRunPythonTaskDBFS(t *testing.T) {
+func TestRunPythonTaskDBFS(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
@@ -107,7 +107,7 @@ func TestAccRunPythonTaskDBFS(t *testing.T) {
})
}
-func TestAccRunPythonTaskRepo(t *testing.T) {
+func TestRunPythonTaskRepo(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
internal.GetEnvOrSkipTest(t, "CLOUD_ENV")

View File

@@ -43,7 +43,7 @@ func createTemporaryRepo(t *testing.T, w *databricks.WorkspaceClient, ctx contex
return repoInfo.Id, repoPath
}
-func TestAccReposCreateWithProvider(t *testing.T) {
+func TestReposCreateWithProvider(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@@ -60,7 +60,7 @@ func TestAccReposCreateWithProvider(t *testing.T) {
assert.Equal(t, workspace.ObjectTypeRepo, oi.ObjectType)
}
-func TestAccReposCreateWithoutProvider(t *testing.T) {
+func TestReposCreateWithoutProvider(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@@ -77,7 +77,7 @@ func TestAccReposCreateWithoutProvider(t *testing.T) {
assert.Equal(t, workspace.ObjectTypeRepo, oi.ObjectType)
}
-func TestAccReposGet(t *testing.T) {
+func TestReposGet(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@@ -106,7 +106,7 @@ func TestAccReposGet(t *testing.T) {
assert.ErrorContains(t, err, "is not a repo")
}
-func TestAccReposUpdate(t *testing.T) {
+func TestReposUpdate(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@@ -127,7 +127,7 @@ func TestAccReposUpdate(t *testing.T) {
assert.Equal(t, byIdOutput.String(), byPathOutput.String())
}
-func TestAccReposDeleteByID(t *testing.T) {
+func TestReposDeleteByID(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@@ -146,7 +146,7 @@ func TestAccReposDeleteByID(t *testing.T) {
assert.True(t, apierr.IsMissing(err), err)
}
-func TestAccReposDeleteByPath(t *testing.T) {
+func TestReposDeleteByPath(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()

View File

@@ -61,7 +61,7 @@ func assertSecretBytesValue(t *acc.WorkspaceT, scope, key string, expected []byt
assert.Equal(t, expected, decoded)
}
-func TestAccSecretsPutSecretStringValue(tt *testing.T) {
+func TestSecretsPutSecretStringValue(tt *testing.T) {
ctx, t := acc.WorkspaceTest(tt)
scope := temporarySecretScope(ctx, t)
key := "test-key"
@@ -75,7 +75,7 @@ func TestAccSecretsPutSecretStringValue(tt *testing.T) {
assertSecretBytesValue(t, scope, key, []byte(value))
}
-func TestAccSecretsPutSecretBytesValue(tt *testing.T) {
+func TestSecretsPutSecretBytesValue(tt *testing.T) {
ctx, t := acc.WorkspaceTest(tt)
scope := temporarySecretScope(ctx, t)
key := "test-key"

View File

@@ -7,7 +7,7 @@ import (
"github.com/stretchr/testify/assert"
)
-func TestAccStorageCredentialsListRendersResponse(t *testing.T) {
+func TestStorageCredentialsListRendersResponse(t *testing.T) {
_, _ = acc.WorkspaceTest(t)
// Check if metastore is assigned for the workspace, otherwise test will fail

View File

@@ -228,7 +228,7 @@ func (a *syncTest) snapshotContains(files []string) {
assert.Equal(a.t, len(files), len(s.LastModifiedTimes))
}
-func TestAccSyncFullFileSync(t *testing.T) {
+func TestSyncFullFileSync(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--full", "--watch")
@@ -260,7 +260,7 @@ func TestAccSyncFullFileSync(t *testing.T) {
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore"))
}
-func TestAccSyncIncrementalFileSync(t *testing.T) {
+func TestSyncIncrementalFileSync(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@@ -294,7 +294,7 @@ func TestAccSyncIncrementalFileSync(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore"))
}
-func TestAccSyncNestedFolderSync(t *testing.T) {
+func TestSyncNestedFolderSync(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@@ -322,7 +322,7 @@ func TestAccSyncNestedFolderSync(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore"))
}
-func TestAccSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) {
+func TestSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@@ -355,7 +355,7 @@ func TestAccSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) {
assertSync.remoteExists(ctx, "dir1")
}
-func TestAccSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) {
+func TestSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@@ -391,7 +391,7 @@ func TestAccSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) {
//
// In the above scenario sync should delete the empty folder and add foo to the remote
// file system
-func TestAccSyncIncrementalFileOverwritesFolder(t *testing.T) {
+func TestSyncIncrementalFileOverwritesFolder(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@@ -421,7 +421,7 @@ func TestAccSyncIncrementalFileOverwritesFolder(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo"))
}
-func TestAccSyncIncrementalSyncPythonNotebookToFile(t *testing.T) {
+func TestSyncIncrementalSyncPythonNotebookToFile(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@@ -452,7 +452,7 @@ func TestAccSyncIncrementalSyncPythonNotebookToFile(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore"))
}
-func TestAccSyncIncrementalSyncFileToPythonNotebook(t *testing.T) {
+func TestSyncIncrementalSyncFileToPythonNotebook(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@@ -476,7 +476,7 @@ func TestAccSyncIncrementalSyncFileToPythonNotebook(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo.py"))
}
-func TestAccSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
+func TestSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@@ -498,7 +498,7 @@ func TestAccSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore"))
}
-func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
+func TestSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient())
@@ -518,7 +518,7 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
assert.ErrorContains(t, err, " does not exist; please create it first")
}
-func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
+func TestSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient())
@@ -540,7 +540,7 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
require.Equal(t, workspace.ObjectTypeDirectory, info.ObjectType)
}
-func TestAccSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) {
+func TestSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient())

View File

@@ -90,7 +90,7 @@ func runTagTestCases(t *testing.T, cases []tagTestCase) {
}
}
-func TestAccTagKeyAWS(t *testing.T) {
+func TestTagKeyAWS(t *testing.T) {
testutil.Require(t, testutil.AWS)
t.Parallel()
@@ -122,7 +122,7 @@ func TestAccTagKeyAWS(t *testing.T) {
})
}
-func TestAccTagValueAWS(t *testing.T) {
+func TestTagValueAWS(t *testing.T) {
testutil.Require(t, testutil.AWS)
t.Parallel()
@@ -148,7 +148,7 @@ func TestAccTagValueAWS(t *testing.T) {
})
}
-func TestAccTagKeyAzure(t *testing.T) {
+func TestTagKeyAzure(t *testing.T) {
testutil.Require(t, testutil.Azure)
t.Parallel()
@@ -180,7 +180,7 @@ func TestAccTagKeyAzure(t *testing.T) {
})
}
-func TestAccTagValueAzure(t *testing.T) {
+func TestTagValueAzure(t *testing.T) {
testutil.Require(t, testutil.Azure)
t.Parallel()
@@ -200,7 +200,7 @@ func TestAccTagValueAzure(t *testing.T) {
})
}
-func TestAccTagKeyGCP(t *testing.T) {
+func TestTagKeyGCP(t *testing.T) {
testutil.Require(t, testutil.GCP)
t.Parallel()
@@ -232,7 +232,7 @@ func TestAccTagKeyGCP(t *testing.T) {
})
}
-func TestAccTagValueGCP(t *testing.T) {
+func TestTagValueGCP(t *testing.T) {
testutil.Require(t, testutil.GCP)
t.Parallel()

View File

@@ -19,7 +19,7 @@ import (
"github.com/stretchr/testify/require"
)
-func TestAccWorkspaceList(t *testing.T) {
+func TestWorkspaceList(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := RequireSuccessfulRun(t, "workspace", "list", "/")
@@ -41,7 +41,7 @@ func TestWorkpaceGetStatusErrorWhenNoArguments(t *testing.T) {
assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0")
}
-func TestAccWorkpaceExportPrintsContents(t *testing.T) {
+func TestWorkpaceExportPrintsContents(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@@ -92,7 +92,7 @@ func assertWorkspaceFileType(t *testing.T, ctx context.Context, f filer.Filer, p
assert.Equal(t, fileType, info.Sys().(workspace.ObjectInfo).ObjectType)
}
-func TestAccExportDir(t *testing.T) {
+func TestExportDir(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
targetDir := t.TempDir()
@@ -137,7 +137,7 @@ func TestAccExportDir(t *testing.T) {
assertLocalFileContents(t, filepath.Join(targetDir, "a/b/c/file-b"), "def")
}
-func TestAccExportDirDoesNotOverwrite(t *testing.T) {
+func TestExportDirDoesNotOverwrite(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
targetDir := t.TempDir()
@@ -158,7 +158,7 @@ func TestAccExportDirDoesNotOverwrite(t *testing.T) {
assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "local content")
}
-func TestAccExportDirWithOverwriteFlag(t *testing.T) {
+func TestExportDirWithOverwriteFlag(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
targetDir := t.TempDir()
@@ -179,7 +179,7 @@ func TestAccExportDirWithOverwriteFlag(t *testing.T) {
assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "content from workspace")
}
-func TestAccImportDir(t *testing.T) {
+func TestImportDir(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
stdout, stderr := RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--log-level=debug")
@@ -208,7 +208,7 @@ func TestAccImportDir(t *testing.T) {
assertFilerFileContents(t, ctx, workspaceFiler, "jupyterNotebook", "# Databricks notebook source\nprint(\"jupyter\")")
}
-func TestAccImportDirDoesNotOverwrite(t *testing.T) {
+func TestImportDirDoesNotOverwrite(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
var err error
@@ -236,7 +236,7 @@ func TestAccImportDirDoesNotOverwrite(t *testing.T) {
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")")
}
-func TestAccImportDirWithOverwriteFlag(t *testing.T) {
+func TestImportDirWithOverwriteFlag(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
var err error
@@ -264,7 +264,7 @@ func TestAccImportDirWithOverwriteFlag(t *testing.T) {
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"python\")")
}
-func TestAccExport(t *testing.T) {
+func TestExport(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
var err error
@@ -293,7 +293,7 @@ func TestAccExport(t *testing.T) {
assert.Contains(t, string(b), `"metadata":`, "jupyter notebooks contain the metadata field")
}
-func TestAccExportWithFileFlag(t *testing.T) {
+func TestExportWithFileFlag(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
localTmpDir := t.TempDir()
@@ -327,7 +327,7 @@ func TestAccExportWithFileFlag(t *testing.T) {
assertLocalFileContents(t, filepath.Join(localTmpDir, "jupyterNb.ipynb"), `"metadata":`)
}
-func TestAccImportFileUsingContentFormatSource(t *testing.T) {
+func TestImportFileUsingContentFormatSource(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Content = `print(1)`. Uploaded as a notebook by default
@@ -344,7 +344,7 @@ func TestAccImportFileUsingContentFormatSource(t *testing.T) {
assertWorkspaceFileType(t, ctx, workspaceFiler, "pyNb", workspace.ObjectTypeNotebook)
}
-func TestAccImportFileUsingContentFormatAuto(t *testing.T) {
+func TestImportFileUsingContentFormatAuto(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Content = `# Databricks notebook source\nprint(1)`. Upload as file if path has no extension.
@@ -366,7 +366,7 @@ func TestAccImportFileUsingContentFormatAuto(t *testing.T) {
assertWorkspaceFileType(t, ctx, workspaceFiler, "not-a-notebook.py", workspace.ObjectTypeFile)
}
-func TestAccImportFileFormatSource(t *testing.T) {
+func TestImportFileFormatSource(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyNotebook"), "--file", "./testdata/import_dir/pyNotebook.py", "--language=PYTHON")
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"python\")")
@@ -380,7 +380,7 @@ func TestAccImportFileFormatSource(t *testing.T) {
assert.ErrorContains(t, err, "The zip file may not be valid or may be an unsupported version. Hint: Objects imported using format=SOURCE are expected to be zip encoded databricks source notebook(s) by default. Please specify a language using the --language flag if you are trying to import a single uncompressed notebook")
}
-func TestAccImportFileFormatAuto(t *testing.T) {
+func TestImportFileFormatAuto(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Upload as file if path has no extension

View File

@@ -255,7 +255,7 @@ func (c *config) promptForValues(r *renderer) error {
// Prompt user for any missing config values. Assign default values if
// terminal is not TTY
func (c *config) promptOrAssignDefaultValues(r *renderer) error {
-// TODO: replace with IsPromptSupported call (requires fixing TestAccBundleInitErrorOnUnknownFields test)
+// TODO: replace with IsPromptSupported call (requires fixing TestBundleInitErrorOnUnknownFields test)
if cmdio.IsOutTTY(c.ctx) && cmdio.IsInTTY(c.ctx) && !cmdio.IsGitBash(c.ctx) {
return c.promptForValues(r)
}
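
The final hunk touches the bundle template prompt logic, which only prompts interactively when both stdin and stdout are attached to a terminal (and the shell is not Git Bash); otherwise it assigns default values. A minimal sketch of that kind of interactivity check, using the standard golang.org/x/term package rather than the CLI's own cmdio helpers (names and structure here are illustrative):

package main

import (
	"fmt"
	"os"

	"golang.org/x/term"
)

// isInteractive reports whether it is reasonable to prompt the user:
// both stdin and stdout must be terminals.
func isInteractive() bool {
	return term.IsTerminal(int(os.Stdin.Fd())) && term.IsTerminal(int(os.Stdout.Fd()))
}

func main() {
	if isInteractive() {
		fmt.Println("prompting for missing configuration values")
	} else {
		fmt.Println("assigning default values")
	}
}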