Consolidate helper functions to `internal/testutil` package (#2002)

## Changes

This is one step (of many) toward reorganizing the integration tests.

This change consolidates the following functions:

* `ReadFile` / `WriteFile`
* `GetEnvOrSkipTest`
* `RandomName`

## Tests

n/a
This commit is contained in:
Pieter Noordhuis 2024-12-12 13:35:38 +01:00 committed by GitHub
parent a7e91a5b68
commit 241fcfffb0
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
36 changed files with 145 additions and 158 deletions

View File

@ -2,6 +2,7 @@ package validate
import ( import (
"context" "context"
"path/filepath"
"testing" "testing"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
@ -81,7 +82,7 @@ func TestFilesToSync_EverythingIgnored(t *testing.T) {
b := setupBundleForFilesToSyncTest(t) b := setupBundleForFilesToSyncTest(t)
// Ignore all files. // Ignore all files.
testutil.WriteFile(t, "*\n.*\n", b.BundleRootPath, ".gitignore") testutil.WriteFile(t, filepath.Join(b.BundleRootPath, ".gitignore"), "*\n.*\n")
ctx := context.Background() ctx := context.Background()
rb := bundle.ReadOnly(b) rb := bundle.ReadOnly(b)

View File

@ -139,7 +139,7 @@ func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle
require.NoError(t, err) require.NoError(t, err)
// Write fake state file. // Write fake state file.
testutil.WriteFile(t, ` testutil.WriteFile(t, filepath.Join(tfDir, TerraformStateFileName), `
{ {
"version": 4, "version": 4,
"terraform_version": "1.5.5", "terraform_version": "1.5.5",
@ -187,5 +187,5 @@ func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle
} }
] ]
} }
`, filepath.Join(tfDir, TerraformStateFileName)) `)
} }

View File

@ -6,6 +6,7 @@ import (
"os" "os"
"testing" "testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
@ -26,7 +27,7 @@ type WorkspaceT struct {
func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
loadDebugEnvIfRunFromIDE(t, "workspace") loadDebugEnvIfRunFromIDE(t, "workspace")
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()
require.NoError(t, err) require.NoError(t, err)
@ -46,7 +47,7 @@ func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
loadDebugEnvIfRunFromIDE(t, "workspace") loadDebugEnvIfRunFromIDE(t, "workspace")
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
if os.Getenv("TEST_METASTORE_ID") == "" { if os.Getenv("TEST_METASTORE_ID") == "" {
t.Skipf("Skipping on non-UC workspaces") t.Skipf("Skipping on non-UC workspaces")
@ -70,7 +71,7 @@ func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
} }
func (t *WorkspaceT) TestClusterID() string { func (t *WorkspaceT) TestClusterID() string {
clusterID := GetEnvOrSkipTest(t.T, "TEST_BRICKS_CLUSTER_ID") clusterID := testutil.GetEnvOrSkipTest(t.T, "TEST_BRICKS_CLUSTER_ID")
err := t.W.Clusters.EnsureClusterIsRunning(t.ctx, clusterID) err := t.W.Clusters.EnsureClusterIsRunning(t.ctx, clusterID)
require.NoError(t, err) require.NoError(t, err)
return clusterID return clusterID
@ -103,7 +104,7 @@ func (t *WorkspaceT) TemporaryWorkspaceDir(name ...string) string {
me, err := t.W.CurrentUser.Me(ctx) me, err := t.W.CurrentUser.Me(ctx)
require.NoError(t, err) require.NoError(t, err)
basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName(name...)) basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName(name...))
t.Logf("Creating %s", basePath) t.Logf("Creating %s", basePath)
err = t.W.Workspace.MkdirsByPath(ctx, basePath) err = t.W.Workspace.MkdirsByPath(ctx, basePath)

View File

@ -3,11 +3,12 @@ package internal
import ( import (
"testing" "testing"
"github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func TestAccAlertsCreateErrWhenNoArguments(t *testing.T) { func TestAccAlertsCreateErrWhenNoArguments(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "alerts-legacy", "create") _, _, err := RequireErrorRun(t, "alerts-legacy", "create")
assert.Equal(t, "please provide command input in JSON format by specifying the --json flag", err.Error()) assert.Equal(t, "please provide command input in JSON format by specifying the --json flag", err.Error())

View File

@ -4,16 +4,18 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"path" "path"
"path/filepath"
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
_ "github.com/databricks/cli/cmd/api" _ "github.com/databricks/cli/cmd/api"
"github.com/databricks/cli/internal/testutil"
) )
func TestAccApiGet(t *testing.T) { func TestAccApiGet(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me") stdout, _ := RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me")
@ -28,14 +30,15 @@ func TestAccApiGet(t *testing.T) {
} }
func TestAccApiPost(t *testing.T) { func TestAccApiPost(t *testing.T) {
env := GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env) t.Log(env)
if env == "gcp" { if env == "gcp" {
t.Skip("DBFS REST API is disabled on gcp") t.Skip("DBFS REST API is disabled on gcp")
} }
dbfsPath := path.Join("/tmp/databricks/integration", RandomName("api-post")) dbfsPath := path.Join("/tmp/databricks/integration", testutil.RandomName("api-post"))
requestPath := writeFile(t, "body.json", fmt.Sprintf(`{ requestPath := filepath.Join(t.TempDir(), "body.json")
testutil.WriteFile(t, requestPath, fmt.Sprintf(`{
"path": "%s" "path": "%s"
}`, dbfsPath)) }`, dbfsPath))

View File

@ -5,12 +5,13 @@ import (
"fmt" "fmt"
"testing" "testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestAuthDescribeSuccess(t *testing.T) { func TestAuthDescribeSuccess(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := RequireSuccessfulRun(t, "auth", "describe") stdout, _ := RequireSuccessfulRun(t, "auth", "describe")
outStr := stdout.String() outStr := stdout.String()
@ -31,7 +32,7 @@ func TestAuthDescribeSuccess(t *testing.T) {
} }
func TestAuthDescribeFailure(t *testing.T) { func TestAuthDescribeFailure(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := RequireSuccessfulRun(t, "auth", "describe", "--profile", "nonexistent") stdout, _ := RequireSuccessfulRun(t, "auth", "describe", "--profile", "nonexistent")
outStr := stdout.String() outStr := stdout.String()

View File

@ -14,6 +14,7 @@ import (
"github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
@ -234,7 +235,7 @@ func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t) ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W w := wt.W
schemaName := internal.RandomName("schema-") schemaName := testutil.RandomName("schema-")
_, err := w.Schemas.Create(ctx, catalog.CreateSchema{ _, err := w.Schemas.Create(ctx, catalog.CreateSchema{
CatalogName: "main", CatalogName: "main",
@ -271,7 +272,7 @@ func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t) ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W w := wt.W
schemaName := internal.RandomName("schema-") schemaName := testutil.RandomName("schema-")
_, err := w.Schemas.Create(ctx, catalog.CreateSchema{ _, err := w.Schemas.Create(ctx, catalog.CreateSchema{
CatalogName: "main", CatalogName: "main",

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/google/uuid" "github.com/google/uuid"
@ -16,7 +17,7 @@ import (
) )
func TestAccBindJobToExistingJob(t *testing.T) { func TestAccBindJobToExistingJob(t *testing.T) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env) t.Log(env)
ctx, wt := acc.WorkspaceTest(t) ctx, wt := acc.WorkspaceTest(t)
@ -81,7 +82,7 @@ func TestAccBindJobToExistingJob(t *testing.T) {
} }
func TestAccAbortBind(t *testing.T) { func TestAccAbortBind(t *testing.T) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env) t.Log(env)
ctx, wt := acc.WorkspaceTest(t) ctx, wt := acc.WorkspaceTest(t)
@ -130,7 +131,7 @@ func TestAccAbortBind(t *testing.T) {
} }
func TestAccGenerateAndBind(t *testing.T) { func TestAccGenerateAndBind(t *testing.T) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env) t.Log(env)
ctx, wt := acc.WorkspaceTest(t) ctx, wt := acc.WorkspaceTest(t)

View File

@ -5,6 +5,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/dashboards" "github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/google/uuid" "github.com/google/uuid"
@ -15,7 +16,7 @@ import (
func TestAccDashboards(t *testing.T) { func TestAccDashboards(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t) ctx, wt := acc.WorkspaceTest(t)
warehouseID := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID") warehouseID := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
uniqueID := uuid.New().String() uniqueID := uuid.New().String()
root, err := initTestTemplate(t, ctx, "dashboards", map[string]any{ root, err := initTestTemplate(t, ctx, "dashboards", map[string]any{
"unique_id": uniqueID, "unique_id": uniqueID,

View File

@ -9,12 +9,13 @@ import (
"github.com/databricks/cli/bundle/deploy" "github.com/databricks/cli/bundle/deploy"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestAccFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) { func TestAccFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env) t.Log(env)
ctx, wt := acc.WorkspaceTest(t) ctx, wt := acc.WorkspaceTest(t)

View File

@ -95,7 +95,7 @@ func (gt *generateJobTest) createTestJob(ctx context.Context) int64 {
require.NoError(t, err) require.NoError(t, err)
resp, err := w.Jobs.Create(ctx, jobs.CreateJob{ resp, err := w.Jobs.Create(ctx, jobs.CreateJob{
Name: internal.RandomName("generated-job-"), Name: testutil.RandomName("generated-job-"),
Tasks: []jobs.Task{ Tasks: []jobs.Task{
{ {
TaskKey: "test", TaskKey: "test",

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/databricks/databricks-sdk-go/service/pipelines"
@ -58,7 +59,7 @@ func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) {
generatedYaml := string(data) generatedYaml := string(data)
// Replace pipeline name // Replace pipeline name
generatedYaml = strings.ReplaceAll(generatedYaml, name, internal.RandomName("copy-generated-pipeline-")) generatedYaml = strings.ReplaceAll(generatedYaml, name, testutil.RandomName("copy-generated-pipeline-"))
err = os.WriteFile(fileName, []byte(generatedYaml), 0o644) err = os.WriteFile(fileName, []byte(generatedYaml), 0o644)
require.NoError(t, err) require.NoError(t, err)
@ -94,10 +95,10 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string,
err = f.Write(ctx, "test.py", strings.NewReader("print('Hello!')")) err = f.Write(ctx, "test.py", strings.NewReader("print('Hello!')"))
require.NoError(t, err) require.NoError(t, err)
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
nodeTypeId := internal.GetNodeTypeId(env) nodeTypeId := internal.GetNodeTypeId(env)
name := internal.RandomName("generated-pipeline-") name := testutil.RandomName("generated-pipeline-")
resp, err := w.Pipelines.Create(ctx, pipelines.CreatePipeline{ resp, err := w.Pipelines.Create(ctx, pipelines.CreatePipeline{
Name: name, Name: name,
Libraries: []pipelines.PipelineLibrary{ Libraries: []pipelines.PipelineLibrary{

View File

@ -13,7 +13,7 @@ import (
) )
func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, artifactPath string) { func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, artifactPath string) {
cloudEnv := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") cloudEnv := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
nodeTypeId := internal.GetNodeTypeId(cloudEnv) nodeTypeId := internal.GetNodeTypeId(cloudEnv)
tmpDir := t.TempDir() tmpDir := t.TempDir()
instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID") instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")
@ -54,7 +54,7 @@ func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) {
} }
func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) { func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
internal.GetEnvOrSkipTest(t, "CLOUD_ENV") testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
testutil.RequireJDK(t, context.Background(), "1.8.0") testutil.RequireJDK(t, context.Background(), "1.8.0")
// Failure on earlier DBR versions: // Failure on earlier DBR versions:
@ -78,7 +78,7 @@ func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
} }
func TestAccSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) { func TestAccSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
internal.GetEnvOrSkipTest(t, "CLOUD_ENV") testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
testutil.RequireJDK(t, context.Background(), "1.8.0") testutil.RequireJDK(t, context.Background(), "1.8.0")
// Failure on earlier DBR versions: // Failure on earlier DBR versions:

View File

@ -3,6 +3,7 @@ package bundle
import ( import (
"context" "context"
"encoding/json" "encoding/json"
"path/filepath"
"testing" "testing"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
@ -16,7 +17,7 @@ func TestAccBundleValidate(t *testing.T) {
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV") testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
tmpDir := t.TempDir() tmpDir := t.TempDir()
testutil.WriteFile(t, testutil.WriteFile(t, filepath.Join(tmpDir, "databricks.yml"),
` `
bundle: bundle:
name: "foobar" name: "foobar"
@ -33,7 +34,7 @@ resources:
inner_loop: inner_loop:
name: inner loop name: inner loop
`, tmpDir, "databricks.yml") `)
ctx := context.Background() ctx := context.Background()
stdout, err := validateBundle(t, ctx, tmpDir) stdout, err := validateBundle(t, ctx, tmpDir)

View File

@ -6,6 +6,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/listing" "github.com/databricks/databricks-sdk-go/listing"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -13,7 +14,7 @@ import (
) )
func TestAccClustersList(t *testing.T) { func TestAccClustersList(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := RequireSuccessfulRun(t, "clusters", "list") stdout, stderr := RequireSuccessfulRun(t, "clusters", "list")
outStr := stdout.String() outStr := stdout.String()
@ -28,7 +29,7 @@ func TestAccClustersList(t *testing.T) {
} }
func TestAccClustersGet(t *testing.T) { func TestAccClustersGet(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
clusterId := findValidClusterID(t) clusterId := findValidClusterID(t)
stdout, stderr := RequireSuccessfulRun(t, "clusters", "get", clusterId) stdout, stderr := RequireSuccessfulRun(t, "clusters", "get", clusterId)

View File

@ -5,6 +5,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge" "github.com/databricks/cli/libs/dyn/merge"
@ -25,7 +26,7 @@ func TestAccDashboardAssumptions_WorkspaceImport(t *testing.T) {
dashboardName := "New Dashboard" dashboardName := "New Dashboard"
dashboardPayload := []byte(`{"pages":[{"name":"2506f97a","displayName":"New Page"}]}`) dashboardPayload := []byte(`{"pages":[{"name":"2506f97a","displayName":"New Page"}]}`)
warehouseId := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID") warehouseId := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
dir := wt.TemporaryWorkspaceDir("dashboard-assumptions-") dir := wt.TemporaryWorkspaceDir("dashboard-assumptions-")

View File

@ -12,6 +12,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -410,33 +411,33 @@ func TestAccFilerWorkspaceNotebook(t *testing.T) {
{ {
name: "pythonJupyterNb.ipynb", name: "pythonJupyterNb.ipynb",
nameWithoutExt: "pythonJupyterNb", nameWithoutExt: "pythonJupyterNb",
content1: readFile(t, "testdata/notebooks/py1.ipynb"), content1: testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"),
expected1: "# Databricks notebook source\nprint(1)", expected1: "# Databricks notebook source\nprint(1)",
content2: readFile(t, "testdata/notebooks/py2.ipynb"), content2: testutil.ReadFile(t, "testdata/notebooks/py2.ipynb"),
expected2: "# Databricks notebook source\nprint(2)", expected2: "# Databricks notebook source\nprint(2)",
}, },
{ {
name: "rJupyterNb.ipynb", name: "rJupyterNb.ipynb",
nameWithoutExt: "rJupyterNb", nameWithoutExt: "rJupyterNb",
content1: readFile(t, "testdata/notebooks/r1.ipynb"), content1: testutil.ReadFile(t, "testdata/notebooks/r1.ipynb"),
expected1: "# Databricks notebook source\nprint(1)", expected1: "# Databricks notebook source\nprint(1)",
content2: readFile(t, "testdata/notebooks/r2.ipynb"), content2: testutil.ReadFile(t, "testdata/notebooks/r2.ipynb"),
expected2: "# Databricks notebook source\nprint(2)", expected2: "# Databricks notebook source\nprint(2)",
}, },
{ {
name: "scalaJupyterNb.ipynb", name: "scalaJupyterNb.ipynb",
nameWithoutExt: "scalaJupyterNb", nameWithoutExt: "scalaJupyterNb",
content1: readFile(t, "testdata/notebooks/scala1.ipynb"), content1: testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb"),
expected1: "// Databricks notebook source\nprintln(1)", expected1: "// Databricks notebook source\nprintln(1)",
content2: readFile(t, "testdata/notebooks/scala2.ipynb"), content2: testutil.ReadFile(t, "testdata/notebooks/scala2.ipynb"),
expected2: "// Databricks notebook source\nprintln(2)", expected2: "// Databricks notebook source\nprintln(2)",
}, },
{ {
name: "sqlJupyterNotebook.ipynb", name: "sqlJupyterNotebook.ipynb",
nameWithoutExt: "sqlJupyterNotebook", nameWithoutExt: "sqlJupyterNotebook",
content1: readFile(t, "testdata/notebooks/sql1.ipynb"), content1: testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb"),
expected1: "-- Databricks notebook source\nselect 1", expected1: "-- Databricks notebook source\nselect 1",
content2: readFile(t, "testdata/notebooks/sql2.ipynb"), content2: testutil.ReadFile(t, "testdata/notebooks/sql2.ipynb"),
expected2: "-- Databricks notebook source\nselect 2", expected2: "-- Databricks notebook source\nselect 2",
}, },
} }
@ -483,13 +484,13 @@ func TestAccFilerWorkspaceFilesExtensionsReadDir(t *testing.T) {
{"foo.r", "print('foo')"}, {"foo.r", "print('foo')"},
{"foo.scala", "println('foo')"}, {"foo.scala", "println('foo')"},
{"foo.sql", "SELECT 'foo'"}, {"foo.sql", "SELECT 'foo'"},
{"py1.ipynb", readFile(t, "testdata/notebooks/py1.ipynb")}, {"py1.ipynb", testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")},
{"pyNb.py", "# Databricks notebook source\nprint('first upload'))"}, {"pyNb.py", "# Databricks notebook source\nprint('first upload'))"},
{"r1.ipynb", readFile(t, "testdata/notebooks/r1.ipynb")}, {"r1.ipynb", testutil.ReadFile(t, "testdata/notebooks/r1.ipynb")},
{"rNb.r", "# Databricks notebook source\nprint('first upload'))"}, {"rNb.r", "# Databricks notebook source\nprint('first upload'))"},
{"scala1.ipynb", readFile(t, "testdata/notebooks/scala1.ipynb")}, {"scala1.ipynb", testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb")},
{"scalaNb.scala", "// Databricks notebook source\n println(\"first upload\"))"}, {"scalaNb.scala", "// Databricks notebook source\n println(\"first upload\"))"},
{"sql1.ipynb", readFile(t, "testdata/notebooks/sql1.ipynb")}, {"sql1.ipynb", testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb")},
{"sqlNb.sql", "-- Databricks notebook source\n SELECT \"first upload\""}, {"sqlNb.sql", "-- Databricks notebook source\n SELECT \"first upload\""},
} }
@ -554,10 +555,10 @@ func setupFilerWithExtensionsTest(t *testing.T) filer.Filer {
}{ }{
{"foo.py", "# Databricks notebook source\nprint('first upload'))"}, {"foo.py", "# Databricks notebook source\nprint('first upload'))"},
{"bar.py", "print('foo')"}, {"bar.py", "print('foo')"},
{"p1.ipynb", readFile(t, "testdata/notebooks/py1.ipynb")}, {"p1.ipynb", testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")},
{"r1.ipynb", readFile(t, "testdata/notebooks/r1.ipynb")}, {"r1.ipynb", testutil.ReadFile(t, "testdata/notebooks/r1.ipynb")},
{"scala1.ipynb", readFile(t, "testdata/notebooks/scala1.ipynb")}, {"scala1.ipynb", testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb")},
{"sql1.ipynb", readFile(t, "testdata/notebooks/sql1.ipynb")}, {"sql1.ipynb", testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb")},
{"pretender", "not a notebook"}, {"pretender", "not a notebook"},
{"dir/file.txt", "file content"}, {"dir/file.txt", "file content"},
{"scala-notebook.scala", "// Databricks notebook source\nprintln('first upload')"}, {"scala-notebook.scala", "// Databricks notebook source\nprintln('first upload')"},
@ -729,7 +730,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) {
wf, _ := setupWsfsExtensionsFiler(t) wf, _ := setupWsfsExtensionsFiler(t)
// Create a notebook // Create a notebook
err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb"))) err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")))
require.NoError(t, err) require.NoError(t, err)
// Reading foo should fail. Even though the WSFS name for the notebook is foo // Reading foo should fail. Even though the WSFS name for the notebook is foo
@ -748,7 +749,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) {
wf, _ := setupWsfsExtensionsFiler(t) wf, _ := setupWsfsExtensionsFiler(t)
// Create a notebook // Create a notebook
err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb"))) err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")))
require.NoError(t, err) require.NoError(t, err)
// Stating foo should fail. Even though the WSFS name for the notebook is foo // Stating foo should fail. Even though the WSFS name for the notebook is foo
@ -767,7 +768,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T)
wf, _ := setupWsfsExtensionsFiler(t) wf, _ := setupWsfsExtensionsFiler(t)
// Create a notebook // Create a notebook
err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb"))) err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")))
require.NoError(t, err) require.NoError(t, err)
// Deleting foo should fail. Even though the WSFS name for the notebook is foo // Deleting foo should fail. Even though the WSFS name for the notebook is foo
@ -849,25 +850,25 @@ func TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) {
language: "python", language: "python",
sourceName: "foo.py", sourceName: "foo.py",
jupyterName: "foo.ipynb", jupyterName: "foo.ipynb",
jupyterContent: readFile(t, "testdata/notebooks/py1.ipynb"), jupyterContent: testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"),
}, },
{ {
language: "r", language: "r",
sourceName: "foo.r", sourceName: "foo.r",
jupyterName: "foo.ipynb", jupyterName: "foo.ipynb",
jupyterContent: readFile(t, "testdata/notebooks/r1.ipynb"), jupyterContent: testutil.ReadFile(t, "testdata/notebooks/r1.ipynb"),
}, },
{ {
language: "scala", language: "scala",
sourceName: "foo.scala", sourceName: "foo.scala",
jupyterName: "foo.ipynb", jupyterName: "foo.ipynb",
jupyterContent: readFile(t, "testdata/notebooks/scala1.ipynb"), jupyterContent: testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb"),
}, },
{ {
language: "sql", language: "sql",
sourceName: "foo.sql", sourceName: "foo.sql",
jupyterName: "foo.ipynb", jupyterName: "foo.ipynb",
jupyterContent: readFile(t, "testdata/notebooks/sql1.ipynb"), jupyterContent: testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb"),
}, },
} { } {
t.Run("jupyter_"+tc.language, func(t *testing.T) { t.Run("jupyter_"+tc.language, func(t *testing.T) {

View File

@ -7,6 +7,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -70,14 +71,14 @@ func TestAccFsCatOnNonExistentFile(t *testing.T) {
} }
func TestAccFsCatForDbfsInvalidScheme(t *testing.T) { func TestAccFsCatForDbfsInvalidScheme(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file") _, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file")
assert.ErrorContains(t, err, "invalid scheme: dab") assert.ErrorContains(t, err, "invalid scheme: dab")
} }
func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) { func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background() ctx := context.Background()
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()

View File

@ -10,6 +10,7 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -355,7 +356,7 @@ func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
} }
func TestAccFsCpErrorsOnInvalidScheme(t *testing.T) { func TestAccFsCpErrorsOnInvalidScheme(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b") _, _, err := RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b")
assert.Equal(t, "invalid scheme: https", err.Error()) assert.Equal(t, "invalid scheme: https", err.Error())

View File

@ -10,6 +10,7 @@ import (
"testing" "testing"
_ "github.com/databricks/cli/cmd/fs" _ "github.com/databricks/cli/cmd/fs"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -166,7 +167,7 @@ func TestAccFsLsForNonexistingDir(t *testing.T) {
func TestAccFsLsWithoutScheme(t *testing.T) { func TestAccFsLsWithoutScheme(t *testing.T) {
t.Parallel() t.Parallel()
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := RequireErrorRun(t, "fs", "ls", "/path-without-a-dbfs-scheme", "--output=json") _, _, err := RequireErrorRun(t, "fs", "ls", "/path-without-a-dbfs-scheme", "--output=json")
assert.ErrorIs(t, err, fs.ErrNotExist) assert.ErrorIs(t, err, fs.ErrNotExist)

View File

@ -6,12 +6,13 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/git"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
func TestAccGitClone(t *testing.T) { func TestAccGitClone(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir() tmpDir := t.TempDir()
ctx := context.Background() ctx := context.Background()
@ -33,7 +34,7 @@ func TestAccGitClone(t *testing.T) {
} }
func TestAccGitCloneOnNonDefaultBranch(t *testing.T) { func TestAccGitCloneOnNonDefaultBranch(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir() tmpDir := t.TempDir()
ctx := context.Background() ctx := context.Background()
@ -54,7 +55,7 @@ func TestAccGitCloneOnNonDefaultBranch(t *testing.T) {
} }
func TestAccGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) { func TestAccGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir() tmpDir := t.TempDir()

View File

@ -8,6 +8,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/dbr" "github.com/databricks/cli/libs/dbr"
"github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/git"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -42,7 +43,7 @@ func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
me, err := wt.W.CurrentUser.Me(ctx) me, err := wt.W.CurrentUser.Me(ctx)
require.NoError(t, err) require.NoError(t, err)
targetPath := acc.RandomName(path.Join("/Workspace/Users", me.UserName, "/testing-clone-bundle-examples-")) targetPath := testutil.RandomName(path.Join("/Workspace/Users", me.UserName, "/testing-clone-bundle-examples-"))
stdout, stderr := RequireSuccessfulRun(t, "repos", "create", examplesRepoUrl, examplesRepoProvider, "--path", targetPath) stdout, stderr := RequireSuccessfulRun(t, "repos", "create", examplesRepoUrl, examplesRepoProvider, "--path", targetPath)
t.Cleanup(func() { t.Cleanup(func() {
RequireSuccessfulRun(t, "repos", "delete", targetPath) RequireSuccessfulRun(t, "repos", "delete", targetPath)
@ -69,7 +70,7 @@ func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
me, err := wt.W.CurrentUser.Me(ctx) me, err := wt.W.CurrentUser.Me(ctx)
require.NoError(t, err) require.NoError(t, err)
rootPath := acc.RandomName(path.Join("/Workspace/Users", me.UserName, "testing-nonrepo-")) rootPath := testutil.RandomName(path.Join("/Workspace/Users", me.UserName, "testing-nonrepo-"))
_, stderr := RequireSuccessfulRun(t, "workspace", "mkdirs", path.Join(rootPath, "a/b/c")) _, stderr := RequireSuccessfulRun(t, "workspace", "mkdirs", path.Join(rootPath, "a/b/c"))
t.Cleanup(func() { t.Cleanup(func() {
RequireSuccessfulRun(t, "workspace", "delete", "--recursive", rootPath) RequireSuccessfulRun(t, "workspace", "delete", "--recursive", rootPath)

View File

@ -8,7 +8,6 @@ import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"math/rand"
"net/http" "net/http"
"os" "os"
"path" "path"
@ -21,6 +20,7 @@ import (
"github.com/databricks/cli/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/flags" "github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/cmd" "github.com/databricks/cli/cmd"
@ -41,30 +41,6 @@ import (
_ "github.com/databricks/cli/cmd/workspace" _ "github.com/databricks/cli/cmd/workspace"
) )
const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
// GetEnvOrSkipTest proceeds with test only with that env variable
func GetEnvOrSkipTest(t *testing.T, name string) string {
value := os.Getenv(name)
if value == "" {
t.Skipf("Environment variable %s is missing", name)
}
return value
}
// RandomName gives random name with optional prefix. e.g. qa.RandomName("tf-")
func RandomName(prefix ...string) string {
randLen := 12
b := make([]byte, randLen)
for i := range b {
b[i] = charset[rand.Intn(randLen)]
}
if len(prefix) > 0 {
return fmt.Sprintf("%s%s", strings.Join(prefix, ""), b)
}
return string(b)
}
// Helper for running the root command in the background. // Helper for running the root command in the background.
// It ensures that the background goroutine terminates upon // It ensures that the background goroutine terminates upon
// test completion through cancelling the command context. // test completion through cancelling the command context.
@ -355,22 +331,6 @@ func RequireErrorRun(t *testing.T, args ...string) (bytes.Buffer, bytes.Buffer,
return stdout, stderr, err return stdout, stderr, err
} }
func readFile(t *testing.T, name string) string {
b, err := os.ReadFile(name)
require.NoError(t, err)
return string(b)
}
func writeFile(t *testing.T, name, body string) string {
f, err := os.Create(filepath.Join(t.TempDir(), name))
require.NoError(t, err)
_, err = f.WriteString(body)
require.NoError(t, err)
f.Close()
return f.Name()
}
func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask { func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
tasks := make([]jobs.SubmitTask, 0) tasks := make([]jobs.SubmitTask, 0)
for i := 0; i < len(versions); i++ { for i := 0; i < len(versions); i++ {
@ -443,7 +403,7 @@ func TemporaryWorkspaceDir(t *testing.T, w *databricks.WorkspaceClient) string {
me, err := w.CurrentUser.Me(ctx) me, err := w.CurrentUser.Me(ctx)
require.NoError(t, err) require.NoError(t, err)
basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName("integration-test-wsfs-")) basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName("integration-test-wsfs-"))
t.Logf("Creating %s", basePath) t.Logf("Creating %s", basePath)
err = w.Workspace.MkdirsByPath(ctx, basePath) err = w.Workspace.MkdirsByPath(ctx, basePath)
@ -467,7 +427,7 @@ func TemporaryWorkspaceDir(t *testing.T, w *databricks.WorkspaceClient) string {
func TemporaryDbfsDir(t *testing.T, w *databricks.WorkspaceClient) string { func TemporaryDbfsDir(t *testing.T, w *databricks.WorkspaceClient) string {
ctx := context.Background() ctx := context.Background()
path := fmt.Sprintf("/tmp/%s", RandomName("integration-test-dbfs-")) path := fmt.Sprintf("/tmp/%s", testutil.RandomName("integration-test-dbfs-"))
t.Logf("Creating DBFS folder:%s", path) t.Logf("Creating DBFS folder:%s", path)
err := w.Dbfs.MkdirsByPath(ctx, path) err := w.Dbfs.MkdirsByPath(ctx, path)
@ -495,7 +455,7 @@ func TemporaryUcVolume(t *testing.T, w *databricks.WorkspaceClient) string {
// Create a schema // Create a schema
schema, err := w.Schemas.Create(ctx, catalog.CreateSchema{ schema, err := w.Schemas.Create(ctx, catalog.CreateSchema{
CatalogName: "main", CatalogName: "main",
Name: RandomName("test-schema-"), Name: testutil.RandomName("test-schema-"),
}) })
require.NoError(t, err) require.NoError(t, err)
t.Cleanup(func() { t.Cleanup(func() {
@ -528,7 +488,7 @@ func TemporaryRepo(t *testing.T, w *databricks.WorkspaceClient) string {
me, err := w.CurrentUser.Me(ctx) me, err := w.CurrentUser.Me(ctx)
require.NoError(t, err) require.NoError(t, err)
repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("integration-test-repo-")) repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("integration-test-repo-"))
t.Logf("Creating repo:%s", repoPath) t.Logf("Creating repo:%s", repoPath)
repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{ repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{
@ -563,7 +523,7 @@ func GetNodeTypeId(env string) string {
} }
func setupLocalFiler(t *testing.T) (filer.Filer, string) { func setupLocalFiler(t *testing.T) (filer.Filer, string) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmp := t.TempDir() tmp := t.TempDir()
f, err := filer.NewLocalClient(tmp) f, err := filer.NewLocalClient(tmp)
@ -610,7 +570,7 @@ func setupDbfsFiler(t *testing.T) (filer.Filer, string) {
} }
func setupUcVolumesFiler(t *testing.T) (filer.Filer, string) { func setupUcVolumesFiler(t *testing.T) (filer.Filer, string) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
if os.Getenv("TEST_METASTORE_ID") == "" { if os.Getenv("TEST_METASTORE_ID") == "" {
t.Skip("Skipping tests that require a UC Volume when metastore id is not set.") t.Skip("Skipping tests that require a UC Volume when metastore id is not set.")

View File

@ -19,7 +19,7 @@ import (
) )
func TestAccBundleInitErrorOnUnknownFields(t *testing.T) { func TestAccBundleInitErrorOnUnknownFields(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir() tmpDir := t.TempDir()
_, _, err := RequireErrorRun(t, "bundle", "init", "./testdata/init/field-does-not-exist", "--output-dir", tmpDir) _, _, err := RequireErrorRun(t, "bundle", "init", "./testdata/init/field-does-not-exist", "--output-dir", tmpDir)
@ -47,7 +47,7 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) {
w, err := databricks.NewWorkspaceClient(&databricks.Config{}) w, err := databricks.NewWorkspaceClient(&databricks.Config{})
require.NoError(t, err) require.NoError(t, err)
projectName := RandomName("project_name_") projectName := testutil.RandomName("project_name_")
// Create a config file with the project name and root dir // Create a config file with the project name and root dir
initConfig := map[string]string{ initConfig := map[string]string{
@ -101,7 +101,7 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) {
} }
func TestAccBundleInitHelpers(t *testing.T) { func TestAccBundleInitHelpers(t *testing.T) {
env := GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env) t.Log(env)
w, err := databricks.NewWorkspaceClient(&databricks.Config{}) w, err := databricks.NewWorkspaceClient(&databricks.Config{})

View File

@ -6,13 +6,14 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestAccCreateJob(t *testing.T) { func TestAccCreateJob(t *testing.T) {
acc.WorkspaceTest(t) acc.WorkspaceTest(t)
env := GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
if env != "azure" { if env != "azure" {
t.Skipf("Not running test on cloud %s", env) t.Skipf("Not running test on cloud %s", env)
} }

View File

@ -11,6 +11,7 @@ import (
"testing" "testing"
"time" "time"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
lockpkg "github.com/databricks/cli/libs/locker" lockpkg "github.com/databricks/cli/libs/locker"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@ -28,7 +29,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr
me, err := wsc.CurrentUser.Me(ctx) me, err := wsc.CurrentUser.Me(ctx)
assert.NoError(t, err) assert.NoError(t, err)
remoteProjectRoot := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName(projectNamePrefix)) remoteProjectRoot := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName(projectNamePrefix))
repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{ repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{
Path: remoteProjectRoot, Path: remoteProjectRoot,
Url: EmptyRepoUrl, Url: EmptyRepoUrl,
@ -44,7 +45,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr
} }
func TestAccLock(t *testing.T) { func TestAccLock(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.TODO() ctx := context.TODO()
wsc, err := databricks.NewWorkspaceClient() wsc, err := databricks.NewWorkspaceClient()
require.NoError(t, err) require.NoError(t, err)
@ -164,7 +165,7 @@ func TestAccLock(t *testing.T) {
} }
func setupLockerTest(ctx context.Context, t *testing.T) (*lockpkg.Locker, filer.Filer) { func setupLockerTest(ctx context.Context, t *testing.T) (*lockpkg.Locker, filer.Filer) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()
require.NoError(t, err) require.NoError(t, err)

View File

@ -15,6 +15,7 @@ import (
"github.com/databricks/cli/bundle/run/output" "github.com/databricks/cli/bundle/run/output"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
@ -75,8 +76,8 @@ var sparkVersions = []string{
func TestAccRunPythonTaskWorkspace(t *testing.T) { func TestAccRunPythonTaskWorkspace(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
internal.GetEnvOrSkipTest(t, "CLOUD_ENV") testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
unsupportedSparkVersionsForWheel := []string{ unsupportedSparkVersionsForWheel := []string{
"11.3.x-scala2.12", "11.3.x-scala2.12",
@ -96,8 +97,8 @@ func TestAccRunPythonTaskWorkspace(t *testing.T) {
func TestAccRunPythonTaskDBFS(t *testing.T) { func TestAccRunPythonTaskDBFS(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
internal.GetEnvOrSkipTest(t, "CLOUD_ENV") testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
runPythonTasks(t, prepareDBFSFiles(t), testOpts{ runPythonTasks(t, prepareDBFSFiles(t), testOpts{
name: "Python tasks from DBFS", name: "Python tasks from DBFS",
@ -109,8 +110,8 @@ func TestAccRunPythonTaskDBFS(t *testing.T) {
func TestAccRunPythonTaskRepo(t *testing.T) { func TestAccRunPythonTaskRepo(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
internal.GetEnvOrSkipTest(t, "CLOUD_ENV") testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
runPythonTasks(t, prepareRepoFiles(t), testOpts{ runPythonTasks(t, prepareRepoFiles(t), testOpts{
name: "Python tasks from Repo", name: "Python tasks from Repo",
@ -121,7 +122,7 @@ func TestAccRunPythonTaskRepo(t *testing.T) {
} }
func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) { func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) {
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env) t.Log(env)
w := tw.w w := tw.w

View File

@ -6,6 +6,7 @@ import (
"strconv" "strconv"
"testing" "testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
@ -16,7 +17,7 @@ import (
func synthesizeTemporaryRepoPath(t *testing.T, w *databricks.WorkspaceClient, ctx context.Context) string { func synthesizeTemporaryRepoPath(t *testing.T, w *databricks.WorkspaceClient, ctx context.Context) string {
me, err := w.CurrentUser.Me(ctx) me, err := w.CurrentUser.Me(ctx)
require.NoError(t, err) require.NoError(t, err)
repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("empty-repo-integration-")) repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("empty-repo-integration-"))
// Cleanup if repo was created at specified path. // Cleanup if repo was created at specified path.
t.Cleanup(func() { t.Cleanup(func() {
@ -44,7 +45,7 @@ func createTemporaryRepo(t *testing.T, w *databricks.WorkspaceClient, ctx contex
} }
func TestAccReposCreateWithProvider(t *testing.T) { func TestAccReposCreateWithProvider(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background() ctx := context.Background()
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()
@ -61,7 +62,7 @@ func TestAccReposCreateWithProvider(t *testing.T) {
} }
func TestAccReposCreateWithoutProvider(t *testing.T) { func TestAccReposCreateWithoutProvider(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background() ctx := context.Background()
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()
@ -78,7 +79,7 @@ func TestAccReposCreateWithoutProvider(t *testing.T) {
} }
func TestAccReposGet(t *testing.T) { func TestAccReposGet(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background() ctx := context.Background()
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()
@ -107,7 +108,7 @@ func TestAccReposGet(t *testing.T) {
} }
func TestAccReposUpdate(t *testing.T) { func TestAccReposUpdate(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background() ctx := context.Background()
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()
@ -128,7 +129,7 @@ func TestAccReposUpdate(t *testing.T) {
} }
func TestAccReposDeleteByID(t *testing.T) { func TestAccReposDeleteByID(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background() ctx := context.Background()
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()
@ -147,7 +148,7 @@ func TestAccReposDeleteByID(t *testing.T) {
} }
func TestAccReposDeleteByPath(t *testing.T) { func TestAccReposDeleteByPath(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background() ctx := context.Background()
w, err := databricks.NewWorkspaceClient() w, err := databricks.NewWorkspaceClient()

View File

@ -7,6 +7,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -18,7 +19,7 @@ func TestSecretsCreateScopeErrWhenNoArguments(t *testing.T) {
} }
func temporarySecretScope(ctx context.Context, t *acc.WorkspaceT) string { func temporarySecretScope(ctx context.Context, t *acc.WorkspaceT) string {
scope := acc.RandomName("cli-acc-") scope := testutil.RandomName("cli-acc-")
err := t.W.Secrets.CreateScope(ctx, workspace.CreateScope{ err := t.W.Secrets.CreateScope(ctx, workspace.CreateScope{
Scope: scope, Scope: scope,
}) })

View File

@ -4,6 +4,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -11,7 +12,7 @@ func TestAccStorageCredentialsListRendersResponse(t *testing.T) {
_, _ = acc.WorkspaceTest(t) _, _ = acc.WorkspaceTest(t)
// Check if metastore is assigned for the workspace, otherwise test will fail // Check if metastore is assigned for the workspace, otherwise test will fail
t.Log(GetEnvOrSkipTest(t, "TEST_METASTORE_ID")) t.Log(testutil.GetEnvOrSkipTest(t, "TEST_METASTORE_ID"))
stdout, stderr := RequireSuccessfulRun(t, "storage-credentials", "list") stdout, stderr := RequireSuccessfulRun(t, "storage-credentials", "list")
assert.NotEmpty(t, stdout) assert.NotEmpty(t, stdout)

View File

@ -16,6 +16,7 @@ import (
"time" "time"
_ "github.com/databricks/cli/cmd/sync" _ "github.com/databricks/cli/cmd/sync"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/sync" "github.com/databricks/cli/libs/sync"
"github.com/databricks/cli/libs/testfile" "github.com/databricks/cli/libs/testfile"
@ -36,7 +37,7 @@ var (
func setupRepo(t *testing.T, wsc *databricks.WorkspaceClient, ctx context.Context) (localRoot, remoteRoot string) { func setupRepo(t *testing.T, wsc *databricks.WorkspaceClient, ctx context.Context) (localRoot, remoteRoot string) {
me, err := wsc.CurrentUser.Me(ctx) me, err := wsc.CurrentUser.Me(ctx)
require.NoError(t, err) require.NoError(t, err)
repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("empty-repo-sync-integration-")) repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("empty-repo-sync-integration-"))
repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{ repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{
Path: repoPath, Path: repoPath,
@ -71,7 +72,7 @@ type syncTest struct {
} }
func setupSyncTest(t *testing.T, args ...string) *syncTest { func setupSyncTest(t *testing.T, args ...string) *syncTest {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
w := databricks.Must(databricks.NewWorkspaceClient()) w := databricks.Must(databricks.NewWorkspaceClient())
localRoot := t.TempDir() localRoot := t.TempDir()
@ -499,7 +500,7 @@ func TestAccSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
} }
func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) { func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient()) wsc := databricks.Must(databricks.NewWorkspaceClient())
ctx := context.Background() ctx := context.Background()
@ -508,7 +509,7 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
// Hypothetical repo path doesn't exist. // Hypothetical repo path doesn't exist.
nonExistingRepoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("doesnt-exist-")) nonExistingRepoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("doesnt-exist-"))
err = sync.EnsureRemotePathIsUsable(ctx, wsc, nonExistingRepoPath, nil) err = sync.EnsureRemotePathIsUsable(ctx, wsc, nonExistingRepoPath, nil)
assert.ErrorContains(t, err, " does not exist; please create it first") assert.ErrorContains(t, err, " does not exist; please create it first")
@ -519,7 +520,7 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
} }
func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) { func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient()) wsc := databricks.Must(databricks.NewWorkspaceClient())
ctx := context.Background() ctx := context.Background()
@ -541,14 +542,14 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
} }
func TestAccSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) { func TestAccSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient()) wsc := databricks.Must(databricks.NewWorkspaceClient())
ctx := context.Background() ctx := context.Background()
me, err := wsc.CurrentUser.Me(ctx) me, err := wsc.CurrentUser.Me(ctx)
require.NoError(t, err) require.NoError(t, err)
remotePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName("ensure-path-exists-test-")) remotePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName("ensure-path-exists-test-"))
err = sync.EnsureRemotePathIsUsable(ctx, wsc, remotePath, me) err = sync.EnsureRemotePathIsUsable(ctx, wsc, remotePath, me)
assert.NoError(t, err) assert.NoError(t, err)

View File

@ -28,7 +28,7 @@ func testTags(t *testing.T, tags map[string]string) error {
ctx := context.Background() ctx := context.Background()
resp, err := w.Jobs.Create(ctx, jobs.CreateJob{ resp, err := w.Jobs.Create(ctx, jobs.CreateJob{
Name: RandomName("test-tags-"), Name: testutil.RandomName("test-tags-"),
Tasks: []jobs.Task{ Tasks: []jobs.Task{
{ {
TaskKey: "test", TaskKey: "test",

View File

@ -39,15 +39,6 @@ func CleanupEnvironment(t *testing.T) {
} }
} }
// GetEnvOrSkipTest proceeds with test only with that env variable
func GetEnvOrSkipTest(t *testing.T, name string) string {
value := os.Getenv(name)
if value == "" {
t.Skipf("Environment variable %s is missing", name)
}
return value
}
// Changes into specified directory for the duration of the test. // Changes into specified directory for the duration of the test.
// Returns the current working directory. // Returns the current working directory.
func Chdir(t *testing.T, dir string) string { func Chdir(t *testing.T, dir string) string {

View File

@ -31,8 +31,8 @@ func Touch(t *testing.T, elems ...string) string {
return path return path
} }
func WriteFile(t *testing.T, content string, elems ...string) string { // WriteFile writes content to a file.
path := filepath.Join(elems...) func WriteFile(t *testing.T, path, content string) {
err := os.MkdirAll(filepath.Dir(path), 0o755) err := os.MkdirAll(filepath.Dir(path), 0o755)
require.NoError(t, err) require.NoError(t, err)
@ -44,5 +44,12 @@ func WriteFile(t *testing.T, content string, elems ...string) string {
err = f.Close() err = f.Close()
require.NoError(t, err) require.NoError(t, err)
return path }
// ReadFile reads a file and returns its content as a string.
func ReadFile(t require.TestingT, path string) string {
b, err := os.ReadFile(path)
require.NoError(t, err)
return string(b)
} }

View File

@ -1,4 +1,4 @@
package acc package testutil
import ( import (
"fmt" "fmt"

View File

@ -12,6 +12,7 @@ import (
"testing" "testing"
"github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
@ -20,7 +21,7 @@ import (
) )
func TestAccWorkspaceList(t *testing.T) { func TestAccWorkspaceList(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := RequireSuccessfulRun(t, "workspace", "list", "/") stdout, stderr := RequireSuccessfulRun(t, "workspace", "list", "/")
outStr := stdout.String() outStr := stdout.String()
@ -42,7 +43,7 @@ func TestWorkpaceGetStatusErrorWhenNoArguments(t *testing.T) {
} }
func TestAccWorkpaceExportPrintsContents(t *testing.T) { func TestAccWorkpaceExportPrintsContents(t *testing.T) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background() ctx := context.Background()
w := databricks.Must(databricks.NewWorkspaceClient()) w := databricks.Must(databricks.NewWorkspaceClient())