# Consolidate helper functions to `internal/testutil` package (#2002)

## Changes

This is one step (of many) toward reorganizing the integration tests.

This change consolidates the following functions into `internal/testutil` (a sketch of the resulting API follows the list):

* `ReadFile` / `WriteFile`
* `GetEnvOrSkipTest`
* `RandomName`
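
For reference, a minimal sketch of the consolidated `internal/testutil` API, reconstructed from the hunks below. The signatures match the diff; the bodies are illustrative, not verbatim — `WriteFile` is condensed to `os.WriteFile`, and the `RandomName` sketch indexes with `len(charset)` where the removed helper used `rand.Intn(randLen)` and therefore only ever sampled the first 12 characters of the charset.

```go
package testutil

import (
	"math/rand"
	"os"
	"path/filepath"
	"strings"
	"testing"

	"github.com/stretchr/testify/require"
)

const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

// GetEnvOrSkipTest proceeds with the test only if the environment variable is set.
func GetEnvOrSkipTest(t *testing.T, name string) string {
	value := os.Getenv(name)
	if value == "" {
		t.Skipf("Environment variable %s is missing", name)
	}
	return value
}

// RandomName returns a random name with an optional prefix, e.g. RandomName("tf-").
func RandomName(prefix ...string) string {
	b := make([]byte, 12)
	for i := range b {
		// The removed helper used rand.Intn(randLen) here; len(charset) is assumed intended.
		b[i] = charset[rand.Intn(len(charset))]
	}
	return strings.Join(prefix, "") + string(b)
}

// WriteFile writes content to a file, creating parent directories as needed.
// The path-first signature replaces the old content-first variadic one.
func WriteFile(t *testing.T, path, content string) {
	err := os.MkdirAll(filepath.Dir(path), 0o755)
	require.NoError(t, err)
	err = os.WriteFile(path, []byte(content), 0o644)
	require.NoError(t, err)
}

// ReadFile reads a file and returns its content as a string.
func ReadFile(t require.TestingT, path string) string {
	b, err := os.ReadFile(path)
	require.NoError(t, err)
	return string(b)
}
```

The `require.TestingT` parameter on `ReadFile` (visible in the final hunk) keeps the helper usable from anything that satisfies testify's minimal interface, not just `*testing.T`.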

## Tests

n/a
Pieter Noordhuis authored 2024-12-12 13:35:38 +01:00, committed by GitHub
parent a7e91a5b68
commit 241fcfffb0
36 changed files with 145 additions and 158 deletions

View File

@@ -2,6 +2,7 @@ package validate
 import (
 "context"
+"path/filepath"
 "testing"
 "github.com/databricks/cli/bundle"
@@ -81,7 +82,7 @@ func TestFilesToSync_EverythingIgnored(t *testing.T) {
 b := setupBundleForFilesToSyncTest(t)
 // Ignore all files.
-testutil.WriteFile(t, "*\n.*\n", b.BundleRootPath, ".gitignore")
+testutil.WriteFile(t, filepath.Join(b.BundleRootPath, ".gitignore"), "*\n.*\n")
 ctx := context.Background()
 rb := bundle.ReadOnly(b)

View File

@@ -139,7 +139,7 @@ func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle
 require.NoError(t, err)
 // Write fake state file.
-testutil.WriteFile(t, `
+testutil.WriteFile(t, filepath.Join(tfDir, TerraformStateFileName), `
 {
 "version": 4,
 "terraform_version": "1.5.5",
@@ -187,5 +187,5 @@ func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle
 }
 ]
 }
-`, filepath.Join(tfDir, TerraformStateFileName))
+`)
 }

View File

@@ -6,6 +6,7 @@ import (
 "os"
 "testing"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/databricks-sdk-go"
 "github.com/databricks/databricks-sdk-go/apierr"
 "github.com/databricks/databricks-sdk-go/service/compute"
@@ -26,7 +27,7 @@ type WorkspaceT struct {
 func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
 loadDebugEnvIfRunFromIDE(t, "workspace")
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 w, err := databricks.NewWorkspaceClient()
 require.NoError(t, err)
@@ -46,7 +47,7 @@ func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
 func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
 loadDebugEnvIfRunFromIDE(t, "workspace")
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 if os.Getenv("TEST_METASTORE_ID") == "" {
 t.Skipf("Skipping on non-UC workspaces")
@@ -70,7 +71,7 @@ func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) {
 }
 func (t *WorkspaceT) TestClusterID() string {
-clusterID := GetEnvOrSkipTest(t.T, "TEST_BRICKS_CLUSTER_ID")
+clusterID := testutil.GetEnvOrSkipTest(t.T, "TEST_BRICKS_CLUSTER_ID")
 err := t.W.Clusters.EnsureClusterIsRunning(t.ctx, clusterID)
 require.NoError(t, err)
 return clusterID
@@ -103,7 +104,7 @@ func (t *WorkspaceT) TemporaryWorkspaceDir(name ...string) string {
 me, err := t.W.CurrentUser.Me(ctx)
 require.NoError(t, err)
-basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName(name...))
+basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName(name...))
 t.Logf("Creating %s", basePath)
 err = t.W.Workspace.MkdirsByPath(ctx, basePath)

View File

@@ -3,11 +3,12 @@ package internal
 import (
 "testing"
+"github.com/databricks/cli/internal/testutil"
 "github.com/stretchr/testify/assert"
 )
 func TestAccAlertsCreateErrWhenNoArguments(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 _, _, err := RequireErrorRun(t, "alerts-legacy", "create")
 assert.Equal(t, "please provide command input in JSON format by specifying the --json flag", err.Error())

View File

@@ -4,16 +4,18 @@ import (
 "encoding/json"
 "fmt"
 "path"
+"path/filepath"
 "testing"
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
 _ "github.com/databricks/cli/cmd/api"
+"github.com/databricks/cli/internal/testutil"
 )
 func TestAccApiGet(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 stdout, _ := RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me")
@@ -28,14 +30,15 @@ func TestAccApiGet(t *testing.T) {
 }
 func TestAccApiPost(t *testing.T) {
-env := GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 t.Log(env)
 if env == "gcp" {
 t.Skip("DBFS REST API is disabled on gcp")
 }
-dbfsPath := path.Join("/tmp/databricks/integration", RandomName("api-post"))
-requestPath := writeFile(t, "body.json", fmt.Sprintf(`{
+dbfsPath := path.Join("/tmp/databricks/integration", testutil.RandomName("api-post"))
+requestPath := filepath.Join(t.TempDir(), "body.json")
+testutil.WriteFile(t, requestPath, fmt.Sprintf(`{
 "path": "%s"
 }`, dbfsPath))

View File

@@ -5,12 +5,13 @@ import (
 "fmt"
 "testing"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/databricks-sdk-go"
 "github.com/stretchr/testify/require"
 )
 func TestAuthDescribeSuccess(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 stdout, _ := RequireSuccessfulRun(t, "auth", "describe")
 outStr := stdout.String()
@@ -31,7 +32,7 @@ func TestAuthDescribeSuccess(t *testing.T) {
 }
 func TestAuthDescribeFailure(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 stdout, _ := RequireSuccessfulRun(t, "auth", "describe", "--profile", "nonexistent")
 outStr := stdout.String()

View File

@@ -14,6 +14,7 @@ import (
 "github.com/databricks/cli/bundle/libraries"
 "github.com/databricks/cli/internal"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/databricks-sdk-go/service/catalog"
 "github.com/databricks/databricks-sdk-go/service/compute"
 "github.com/databricks/databricks-sdk-go/service/jobs"
@@ -234,7 +235,7 @@ func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
 ctx, wt := acc.UcWorkspaceTest(t)
 w := wt.W
-schemaName := internal.RandomName("schema-")
+schemaName := testutil.RandomName("schema-")
 _, err := w.Schemas.Create(ctx, catalog.CreateSchema{
 CatalogName: "main",
@@ -271,7 +272,7 @@ func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
 ctx, wt := acc.UcWorkspaceTest(t)
 w := wt.W
-schemaName := internal.RandomName("schema-")
+schemaName := testutil.RandomName("schema-")
 _, err := w.Schemas.Create(ctx, catalog.CreateSchema{
 CatalogName: "main",

View File

@@ -8,6 +8,7 @@ import (
 "github.com/databricks/cli/internal"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/databricks-sdk-go"
 "github.com/databricks/databricks-sdk-go/service/jobs"
 "github.com/google/uuid"
@@ -16,7 +17,7 @@ import (
 )
 func TestAccBindJobToExistingJob(t *testing.T) {
-env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 t.Log(env)
 ctx, wt := acc.WorkspaceTest(t)
@@ -81,7 +82,7 @@ func TestAccBindJobToExistingJob(t *testing.T) {
 }
 func TestAccAbortBind(t *testing.T) {
-env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 t.Log(env)
 ctx, wt := acc.WorkspaceTest(t)
@@ -130,7 +131,7 @@ func TestAccAbortBind(t *testing.T) {
 }
 func TestAccGenerateAndBind(t *testing.T) {
-env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 t.Log(env)
 ctx, wt := acc.WorkspaceTest(t)

View File

@@ -5,6 +5,7 @@ import (
 "testing"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/databricks-sdk-go/service/dashboards"
 "github.com/databricks/databricks-sdk-go/service/workspace"
 "github.com/google/uuid"
@@ -15,7 +16,7 @@ import (
 func TestAccDashboards(t *testing.T) {
 ctx, wt := acc.WorkspaceTest(t)
-warehouseID := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
+warehouseID := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
 uniqueID := uuid.New().String()
 root, err := initTestTemplate(t, ctx, "dashboards", map[string]any{
 "unique_id": uniqueID,

View File

@@ -9,12 +9,13 @@ import (
 "github.com/databricks/cli/bundle/deploy"
 "github.com/databricks/cli/internal"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/google/uuid"
 "github.com/stretchr/testify/require"
 )
 func TestAccFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) {
-env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 t.Log(env)
 ctx, wt := acc.WorkspaceTest(t)

View File

@@ -95,7 +95,7 @@ func (gt *generateJobTest) createTestJob(ctx context.Context) int64 {
 require.NoError(t, err)
 resp, err := w.Jobs.Create(ctx, jobs.CreateJob{
-Name: internal.RandomName("generated-job-"),
+Name: testutil.RandomName("generated-job-"),
 Tasks: []jobs.Task{
 {
 TaskKey: "test",

View File

@@ -11,6 +11,7 @@ import (
 "github.com/databricks/cli/internal"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 "github.com/databricks/databricks-sdk-go"
 "github.com/databricks/databricks-sdk-go/service/pipelines"
@@ -58,7 +59,7 @@ func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) {
 generatedYaml := string(data)
 // Replace pipeline name
-generatedYaml = strings.ReplaceAll(generatedYaml, name, internal.RandomName("copy-generated-pipeline-"))
+generatedYaml = strings.ReplaceAll(generatedYaml, name, testutil.RandomName("copy-generated-pipeline-"))
 err = os.WriteFile(fileName, []byte(generatedYaml), 0o644)
 require.NoError(t, err)
@@ -94,10 +95,10 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string,
 err = f.Write(ctx, "test.py", strings.NewReader("print('Hello!')"))
 require.NoError(t, err)
-env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 nodeTypeId := internal.GetNodeTypeId(env)
-name := internal.RandomName("generated-pipeline-")
+name := testutil.RandomName("generated-pipeline-")
 resp, err := w.Pipelines.Create(ctx, pipelines.CreatePipeline{
 Name: name,
 Libraries: []pipelines.PipelineLibrary{

View File

@@ -13,7 +13,7 @@ import (
 )
 func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, artifactPath string) {
-cloudEnv := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+cloudEnv := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 nodeTypeId := internal.GetNodeTypeId(cloudEnv)
 tmpDir := t.TempDir()
 instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")
@@ -54,7 +54,7 @@ func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) {
 }
 func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
-internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 testutil.RequireJDK(t, context.Background(), "1.8.0")
 // Failure on earlier DBR versions:
@@ -78,7 +78,7 @@ func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
 }
 func TestAccSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
-internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 testutil.RequireJDK(t, context.Background(), "1.8.0")
 // Failure on earlier DBR versions:
// Failure on earlier DBR versions:

View File

@@ -3,6 +3,7 @@ package bundle
 import (
 "context"
 "encoding/json"
+"path/filepath"
 "testing"
 "github.com/databricks/cli/internal/testutil"
@@ -16,7 +17,7 @@ func TestAccBundleValidate(t *testing.T) {
 testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 tmpDir := t.TempDir()
-testutil.WriteFile(t,
+testutil.WriteFile(t, filepath.Join(tmpDir, "databricks.yml"),
 `
 bundle:
 name: "foobar"
@@ -33,7 +34,7 @@ resources:
 inner_loop:
 name: inner loop
-`, tmpDir, "databricks.yml")
+`)
 ctx := context.Background()
 stdout, err := validateBundle(t, ctx, tmpDir)

View File

@@ -6,6 +6,7 @@ import (
 "testing"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/databricks-sdk-go/listing"
 "github.com/databricks/databricks-sdk-go/service/compute"
 "github.com/stretchr/testify/assert"
@@ -13,7 +14,7 @@ import (
 )
 func TestAccClustersList(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 stdout, stderr := RequireSuccessfulRun(t, "clusters", "list")
 outStr := stdout.String()
@@ -28,7 +29,7 @@ func TestAccClustersList(t *testing.T) {
 }
 func TestAccClustersGet(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 clusterId := findValidClusterID(t)
 stdout, stderr := RequireSuccessfulRun(t, "clusters", "get", clusterId)

View File

@@ -5,6 +5,7 @@ import (
 "testing"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/dyn"
 "github.com/databricks/cli/libs/dyn/convert"
 "github.com/databricks/cli/libs/dyn/merge"
@@ -25,7 +26,7 @@ func TestAccDashboardAssumptions_WorkspaceImport(t *testing.T) {
 dashboardName := "New Dashboard"
 dashboardPayload := []byte(`{"pages":[{"name":"2506f97a","displayName":"New Page"}]}`)
-warehouseId := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
+warehouseId := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
 dir := wt.TemporaryWorkspaceDir("dashboard-assumptions-")

View File

@@ -12,6 +12,7 @@ import (
 "strings"
 "testing"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
@@ -410,33 +411,33 @@ func TestAccFilerWorkspaceNotebook(t *testing.T) {
 {
 name: "pythonJupyterNb.ipynb",
 nameWithoutExt: "pythonJupyterNb",
-content1: readFile(t, "testdata/notebooks/py1.ipynb"),
+content1: testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"),
 expected1: "# Databricks notebook source\nprint(1)",
-content2: readFile(t, "testdata/notebooks/py2.ipynb"),
+content2: testutil.ReadFile(t, "testdata/notebooks/py2.ipynb"),
 expected2: "# Databricks notebook source\nprint(2)",
 },
 {
 name: "rJupyterNb.ipynb",
 nameWithoutExt: "rJupyterNb",
-content1: readFile(t, "testdata/notebooks/r1.ipynb"),
+content1: testutil.ReadFile(t, "testdata/notebooks/r1.ipynb"),
 expected1: "# Databricks notebook source\nprint(1)",
-content2: readFile(t, "testdata/notebooks/r2.ipynb"),
+content2: testutil.ReadFile(t, "testdata/notebooks/r2.ipynb"),
 expected2: "# Databricks notebook source\nprint(2)",
 },
 {
 name: "scalaJupyterNb.ipynb",
 nameWithoutExt: "scalaJupyterNb",
-content1: readFile(t, "testdata/notebooks/scala1.ipynb"),
+content1: testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb"),
 expected1: "// Databricks notebook source\nprintln(1)",
-content2: readFile(t, "testdata/notebooks/scala2.ipynb"),
+content2: testutil.ReadFile(t, "testdata/notebooks/scala2.ipynb"),
 expected2: "// Databricks notebook source\nprintln(2)",
 },
 {
 name: "sqlJupyterNotebook.ipynb",
 nameWithoutExt: "sqlJupyterNotebook",
-content1: readFile(t, "testdata/notebooks/sql1.ipynb"),
+content1: testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb"),
 expected1: "-- Databricks notebook source\nselect 1",
-content2: readFile(t, "testdata/notebooks/sql2.ipynb"),
+content2: testutil.ReadFile(t, "testdata/notebooks/sql2.ipynb"),
 expected2: "-- Databricks notebook source\nselect 2",
 },
 }
@@ -483,13 +484,13 @@ func TestAccFilerWorkspaceFilesExtensionsReadDir(t *testing.T) {
 {"foo.r", "print('foo')"},
 {"foo.scala", "println('foo')"},
 {"foo.sql", "SELECT 'foo'"},
-{"py1.ipynb", readFile(t, "testdata/notebooks/py1.ipynb")},
+{"py1.ipynb", testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")},
 {"pyNb.py", "# Databricks notebook source\nprint('first upload'))"},
-{"r1.ipynb", readFile(t, "testdata/notebooks/r1.ipynb")},
+{"r1.ipynb", testutil.ReadFile(t, "testdata/notebooks/r1.ipynb")},
 {"rNb.r", "# Databricks notebook source\nprint('first upload'))"},
-{"scala1.ipynb", readFile(t, "testdata/notebooks/scala1.ipynb")},
+{"scala1.ipynb", testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb")},
 {"scalaNb.scala", "// Databricks notebook source\n println(\"first upload\"))"},
-{"sql1.ipynb", readFile(t, "testdata/notebooks/sql1.ipynb")},
+{"sql1.ipynb", testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb")},
 {"sqlNb.sql", "-- Databricks notebook source\n SELECT \"first upload\""},
 }
@@ -554,10 +555,10 @@ func setupFilerWithExtensionsTest(t *testing.T) filer.Filer {
 }{
 {"foo.py", "# Databricks notebook source\nprint('first upload'))"},
 {"bar.py", "print('foo')"},
-{"p1.ipynb", readFile(t, "testdata/notebooks/py1.ipynb")},
-{"r1.ipynb", readFile(t, "testdata/notebooks/r1.ipynb")},
-{"scala1.ipynb", readFile(t, "testdata/notebooks/scala1.ipynb")},
-{"sql1.ipynb", readFile(t, "testdata/notebooks/sql1.ipynb")},
+{"p1.ipynb", testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")},
+{"r1.ipynb", testutil.ReadFile(t, "testdata/notebooks/r1.ipynb")},
+{"scala1.ipynb", testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb")},
+{"sql1.ipynb", testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb")},
 {"pretender", "not a notebook"},
 {"dir/file.txt", "file content"},
 {"scala-notebook.scala", "// Databricks notebook source\nprintln('first upload')"},
@@ -729,7 +730,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) {
 wf, _ := setupWsfsExtensionsFiler(t)
 // Create a notebook
-err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb")))
+err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")))
 require.NoError(t, err)
 // Reading foo should fail. Even though the WSFS name for the notebook is foo
@@ -748,7 +749,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) {
 wf, _ := setupWsfsExtensionsFiler(t)
 // Create a notebook
-err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb")))
+err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")))
 require.NoError(t, err)
 // Stating foo should fail. Even though the WSFS name for the notebook is foo
@@ -767,7 +768,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T)
 wf, _ := setupWsfsExtensionsFiler(t)
 // Create a notebook
-err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb")))
+err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")))
 require.NoError(t, err)
 // Deleting foo should fail. Even though the WSFS name for the notebook is foo
@@ -849,25 +850,25 @@ func TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) {
 language: "python",
 sourceName: "foo.py",
 jupyterName: "foo.ipynb",
-jupyterContent: readFile(t, "testdata/notebooks/py1.ipynb"),
+jupyterContent: testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"),
 },
 {
 language: "r",
 sourceName: "foo.r",
 jupyterName: "foo.ipynb",
-jupyterContent: readFile(t, "testdata/notebooks/r1.ipynb"),
+jupyterContent: testutil.ReadFile(t, "testdata/notebooks/r1.ipynb"),
 },
 {
 language: "scala",
 sourceName: "foo.scala",
 jupyterName: "foo.ipynb",
-jupyterContent: readFile(t, "testdata/notebooks/scala1.ipynb"),
+jupyterContent: testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb"),
 },
 {
 language: "sql",
 sourceName: "foo.sql",
 jupyterName: "foo.ipynb",
-jupyterContent: readFile(t, "testdata/notebooks/sql1.ipynb"),
+jupyterContent: testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb"),
 },
 } {
 t.Run("jupyter_"+tc.language, func(t *testing.T) {

View File

@@ -7,6 +7,7 @@ import (
 "strings"
 "testing"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 "github.com/databricks/databricks-sdk-go"
 "github.com/stretchr/testify/assert"
@@ -70,14 +71,14 @@ func TestAccFsCatOnNonExistentFile(t *testing.T) {
 }
 func TestAccFsCatForDbfsInvalidScheme(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 _, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file")
 assert.ErrorContains(t, err, "invalid scheme: dab")
 }
 func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.Background()
 w, err := databricks.NewWorkspaceClient()

View File

@@ -10,6 +10,7 @@ import (
 "strings"
 "testing"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
@@ -355,7 +356,7 @@ func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
 }
 func TestAccFsCpErrorsOnInvalidScheme(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 _, _, err := RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b")
 assert.Equal(t, "invalid scheme: https", err.Error())

View File

@@ -10,6 +10,7 @@ import (
 "testing"
 _ "github.com/databricks/cli/cmd/fs"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
@@ -166,7 +167,7 @@ func TestAccFsLsForNonexistingDir(t *testing.T) {
 func TestAccFsLsWithoutScheme(t *testing.T) {
 t.Parallel()
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 _, _, err := RequireErrorRun(t, "fs", "ls", "/path-without-a-dbfs-scheme", "--output=json")
 assert.ErrorIs(t, err, fs.ErrNotExist)

View File

@@ -6,12 +6,13 @@ import (
 "path/filepath"
 "testing"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/git"
 "github.com/stretchr/testify/assert"
 )
 func TestAccGitClone(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 tmpDir := t.TempDir()
 ctx := context.Background()
@@ -33,7 +34,7 @@ func TestAccGitClone(t *testing.T) {
 }
 func TestAccGitCloneOnNonDefaultBranch(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 tmpDir := t.TempDir()
 ctx := context.Background()
@@ -54,7 +55,7 @@ func TestAccGitCloneOnNonDefaultBranch(t *testing.T) {
 }
 func TestAccGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 tmpDir := t.TempDir()

View File

@@ -8,6 +8,7 @@ import (
 "testing"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/dbr"
 "github.com/databricks/cli/libs/git"
 "github.com/stretchr/testify/assert"
@@ -42,7 +43,7 @@ func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
 me, err := wt.W.CurrentUser.Me(ctx)
 require.NoError(t, err)
-targetPath := acc.RandomName(path.Join("/Workspace/Users", me.UserName, "/testing-clone-bundle-examples-"))
+targetPath := testutil.RandomName(path.Join("/Workspace/Users", me.UserName, "/testing-clone-bundle-examples-"))
 stdout, stderr := RequireSuccessfulRun(t, "repos", "create", examplesRepoUrl, examplesRepoProvider, "--path", targetPath)
 t.Cleanup(func() {
 RequireSuccessfulRun(t, "repos", "delete", targetPath)
@@ -69,7 +70,7 @@ func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
 me, err := wt.W.CurrentUser.Me(ctx)
 require.NoError(t, err)
-rootPath := acc.RandomName(path.Join("/Workspace/Users", me.UserName, "testing-nonrepo-"))
+rootPath := testutil.RandomName(path.Join("/Workspace/Users", me.UserName, "testing-nonrepo-"))
 _, stderr := RequireSuccessfulRun(t, "workspace", "mkdirs", path.Join(rootPath, "a/b/c"))
 t.Cleanup(func() {
 RequireSuccessfulRun(t, "workspace", "delete", "--recursive", rootPath)

View File

@@ -8,7 +8,6 @@ import (
 "errors"
 "fmt"
 "io"
-"math/rand"
 "net/http"
 "os"
 "path"
@@ -21,6 +20,7 @@ import (
 "github.com/databricks/cli/cmd/root"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/flags"
 "github.com/databricks/cli/cmd"
@@ -41,30 +41,6 @@ import (
 _ "github.com/databricks/cli/cmd/workspace"
 )
-const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
-// GetEnvOrSkipTest proceeds with test only with that env variable
-func GetEnvOrSkipTest(t *testing.T, name string) string {
-value := os.Getenv(name)
-if value == "" {
-t.Skipf("Environment variable %s is missing", name)
-}
-return value
-}
-// RandomName gives random name with optional prefix. e.g. qa.RandomName("tf-")
-func RandomName(prefix ...string) string {
-randLen := 12
-b := make([]byte, randLen)
-for i := range b {
-b[i] = charset[rand.Intn(randLen)]
-}
-if len(prefix) > 0 {
-return fmt.Sprintf("%s%s", strings.Join(prefix, ""), b)
-}
-return string(b)
-}
 // Helper for running the root command in the background.
 // It ensures that the background goroutine terminates upon
 // test completion through cancelling the command context.
@@ -355,22 +331,6 @@ func RequireErrorRun(t *testing.T, args ...string) (bytes.Buffer, bytes.Buffer,
 return stdout, stderr, err
 }
-func readFile(t *testing.T, name string) string {
-b, err := os.ReadFile(name)
-require.NoError(t, err)
-return string(b)
-}
-func writeFile(t *testing.T, name, body string) string {
-f, err := os.Create(filepath.Join(t.TempDir(), name))
-require.NoError(t, err)
-_, err = f.WriteString(body)
-require.NoError(t, err)
-f.Close()
-return f.Name()
-}
 func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
 tasks := make([]jobs.SubmitTask, 0)
 for i := 0; i < len(versions); i++ {
@@ -443,7 +403,7 @@ func TemporaryWorkspaceDir(t *testing.T, w *databricks.WorkspaceClient) string {
 me, err := w.CurrentUser.Me(ctx)
 require.NoError(t, err)
-basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName("integration-test-wsfs-"))
+basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName("integration-test-wsfs-"))
 t.Logf("Creating %s", basePath)
 err = w.Workspace.MkdirsByPath(ctx, basePath)
@@ -467,7 +427,7 @@ func TemporaryWorkspaceDir(t *testing.T, w *databricks.WorkspaceClient) string {
 func TemporaryDbfsDir(t *testing.T, w *databricks.WorkspaceClient) string {
 ctx := context.Background()
-path := fmt.Sprintf("/tmp/%s", RandomName("integration-test-dbfs-"))
+path := fmt.Sprintf("/tmp/%s", testutil.RandomName("integration-test-dbfs-"))
 t.Logf("Creating DBFS folder:%s", path)
 err := w.Dbfs.MkdirsByPath(ctx, path)
@@ -495,7 +455,7 @@ func TemporaryUcVolume(t *testing.T, w *databricks.WorkspaceClient) string {
 // Create a schema
 schema, err := w.Schemas.Create(ctx, catalog.CreateSchema{
 CatalogName: "main",
-Name: RandomName("test-schema-"),
+Name: testutil.RandomName("test-schema-"),
 })
 require.NoError(t, err)
 t.Cleanup(func() {
@@ -528,7 +488,7 @@ func TemporaryRepo(t *testing.T, w *databricks.WorkspaceClient) string {
 me, err := w.CurrentUser.Me(ctx)
 require.NoError(t, err)
-repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("integration-test-repo-"))
+repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("integration-test-repo-"))
 t.Logf("Creating repo:%s", repoPath)
 repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{
@@ -563,7 +523,7 @@ func GetNodeTypeId(env string) string {
 }
 func setupLocalFiler(t *testing.T) (filer.Filer, string) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 tmp := t.TempDir()
 f, err := filer.NewLocalClient(tmp)
@@ -610,7 +570,7 @@ func setupDbfsFiler(t *testing.T) (filer.Filer, string) {
 }
 func setupUcVolumesFiler(t *testing.T) (filer.Filer, string) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 if os.Getenv("TEST_METASTORE_ID") == "" {
 t.Skip("Skipping tests that require a UC Volume when metastore id is not set.")

View File

@@ -19,7 +19,7 @@ import (
 )
 func TestAccBundleInitErrorOnUnknownFields(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 tmpDir := t.TempDir()
 _, _, err := RequireErrorRun(t, "bundle", "init", "./testdata/init/field-does-not-exist", "--output-dir", tmpDir)
@@ -47,7 +47,7 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) {
 w, err := databricks.NewWorkspaceClient(&databricks.Config{})
 require.NoError(t, err)
-projectName := RandomName("project_name_")
+projectName := testutil.RandomName("project_name_")
 // Create a config file with the project name and root dir
 initConfig := map[string]string{
@@ -101,7 +101,7 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) {
 }
 func TestAccBundleInitHelpers(t *testing.T) {
-env := GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 t.Log(env)
 w, err := databricks.NewWorkspaceClient(&databricks.Config{})

View File

@@ -6,13 +6,14 @@ import (
 "testing"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
 )
 func TestAccCreateJob(t *testing.T) {
 acc.WorkspaceTest(t)
-env := GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 if env != "azure" {
 t.Skipf("Not running test on cloud %s", env)
 }

View File

@@ -11,6 +11,7 @@ import (
 "testing"
 "time"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 lockpkg "github.com/databricks/cli/libs/locker"
 "github.com/databricks/databricks-sdk-go"
@@ -28,7 +29,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr
 me, err := wsc.CurrentUser.Me(ctx)
 assert.NoError(t, err)
-remoteProjectRoot := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName(projectNamePrefix))
+remoteProjectRoot := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName(projectNamePrefix))
 repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{
 Path: remoteProjectRoot,
 Url: EmptyRepoUrl,
@@ -44,7 +45,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr
 }
 func TestAccLock(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.TODO()
 wsc, err := databricks.NewWorkspaceClient()
 require.NoError(t, err)
@@ -164,7 +165,7 @@ func TestAccLock(t *testing.T) {
 }
 func setupLockerTest(ctx context.Context, t *testing.T) (*lockpkg.Locker, filer.Filer) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 w, err := databricks.NewWorkspaceClient()
 require.NoError(t, err)

View File

@@ -15,6 +15,7 @@ import (
 "github.com/databricks/cli/bundle/run/output"
 "github.com/databricks/cli/internal"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 "github.com/databricks/databricks-sdk-go"
 "github.com/databricks/databricks-sdk-go/service/jobs"
@@ -75,8 +76,8 @@ var sparkVersions = []string{
 func TestAccRunPythonTaskWorkspace(t *testing.T) {
 // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
-internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
-internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
+testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 unsupportedSparkVersionsForWheel := []string{
 "11.3.x-scala2.12",
@@ -96,8 +97,8 @@ func TestAccRunPythonTaskWorkspace(t *testing.T) {
 func TestAccRunPythonTaskDBFS(t *testing.T) {
 // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
-internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
-internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
+testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 runPythonTasks(t, prepareDBFSFiles(t), testOpts{
 name: "Python tasks from DBFS",
@@ -109,8 +110,8 @@ func TestAccRunPythonTaskDBFS(t *testing.T) {
 func TestAccRunPythonTaskRepo(t *testing.T) {
 // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
-internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
-internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
+testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 runPythonTasks(t, prepareRepoFiles(t), testOpts{
 name: "Python tasks from Repo",
@@ -121,7 +122,7 @@ func TestAccRunPythonTaskRepo(t *testing.T) {
 }
 func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) {
-env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
 t.Log(env)
 w := tw.w

View File

@@ -6,6 +6,7 @@ import (
 "strconv"
 "testing"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/databricks-sdk-go"
 "github.com/databricks/databricks-sdk-go/apierr"
 "github.com/databricks/databricks-sdk-go/service/workspace"
@@ -16,7 +17,7 @@ import (
 func synthesizeTemporaryRepoPath(t *testing.T, w *databricks.WorkspaceClient, ctx context.Context) string {
 me, err := w.CurrentUser.Me(ctx)
 require.NoError(t, err)
-repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("empty-repo-integration-"))
+repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("empty-repo-integration-"))
 // Cleanup if repo was created at specified path.
 t.Cleanup(func() {
@@ -44,7 +45,7 @@ func createTemporaryRepo(t *testing.T, w *databricks.WorkspaceClient, ctx contex
 }
 func TestAccReposCreateWithProvider(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.Background()
 w, err := databricks.NewWorkspaceClient()
@@ -61,7 +62,7 @@ func TestAccReposCreateWithProvider(t *testing.T) {
 }
 func TestAccReposCreateWithoutProvider(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.Background()
 w, err := databricks.NewWorkspaceClient()
@@ -78,7 +79,7 @@ func TestAccReposCreateWithoutProvider(t *testing.T) {
 }
 func TestAccReposGet(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.Background()
 w, err := databricks.NewWorkspaceClient()
@@ -107,7 +108,7 @@ func TestAccReposGet(t *testing.T) {
 }
 func TestAccReposUpdate(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.Background()
 w, err := databricks.NewWorkspaceClient()
@@ -128,7 +129,7 @@ func TestAccReposUpdate(t *testing.T) {
 }
 func TestAccReposDeleteByID(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.Background()
 w, err := databricks.NewWorkspaceClient()
@@ -147,7 +148,7 @@ func TestAccReposDeleteByID(t *testing.T) {
 }
 func TestAccReposDeleteByPath(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.Background()
 w, err := databricks.NewWorkspaceClient()

View File

@@ -7,6 +7,7 @@ import (
 "testing"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/databricks-sdk-go/service/workspace"
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
@@ -18,7 +19,7 @@ func TestSecretsCreateScopeErrWhenNoArguments(t *testing.T) {
 }
 func temporarySecretScope(ctx context.Context, t *acc.WorkspaceT) string {
-scope := acc.RandomName("cli-acc-")
+scope := testutil.RandomName("cli-acc-")
 err := t.W.Secrets.CreateScope(ctx, workspace.CreateScope{
 Scope: scope,
 })

View File

@@ -4,6 +4,7 @@ import (
 "testing"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/stretchr/testify/assert"
 )
@@ -11,7 +12,7 @@ func TestAccStorageCredentialsListRendersResponse(t *testing.T) {
 _, _ = acc.WorkspaceTest(t)
 // Check if metastore is assigned for the workspace, otherwise test will fail
-t.Log(GetEnvOrSkipTest(t, "TEST_METASTORE_ID"))
+t.Log(testutil.GetEnvOrSkipTest(t, "TEST_METASTORE_ID"))
 stdout, stderr := RequireSuccessfulRun(t, "storage-credentials", "list")
 assert.NotEmpty(t, stdout)

View File

@@ -16,6 +16,7 @@ import (
 "time"
 _ "github.com/databricks/cli/cmd/sync"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 "github.com/databricks/cli/libs/sync"
 "github.com/databricks/cli/libs/testfile"
@@ -36,7 +37,7 @@ var (
 func setupRepo(t *testing.T, wsc *databricks.WorkspaceClient, ctx context.Context) (localRoot, remoteRoot string) {
 me, err := wsc.CurrentUser.Me(ctx)
 require.NoError(t, err)
-repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("empty-repo-sync-integration-"))
+repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("empty-repo-sync-integration-"))
 repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{
 Path: repoPath,
@@ -71,7 +72,7 @@ type syncTest struct {
 }
 func setupSyncTest(t *testing.T, args ...string) *syncTest {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 w := databricks.Must(databricks.NewWorkspaceClient())
 localRoot := t.TempDir()
@@ -499,7 +500,7 @@ func TestAccSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
 }
 func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 wsc := databricks.Must(databricks.NewWorkspaceClient())
 ctx := context.Background()
@@ -508,7 +509,7 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
 require.NoError(t, err)
 // Hypothetical repo path doesn't exist.
-nonExistingRepoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("doesnt-exist-"))
+nonExistingRepoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("doesnt-exist-"))
 err = sync.EnsureRemotePathIsUsable(ctx, wsc, nonExistingRepoPath, nil)
 assert.ErrorContains(t, err, " does not exist; please create it first")
@@ -519,7 +520,7 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
 }
 func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 wsc := databricks.Must(databricks.NewWorkspaceClient())
 ctx := context.Background()
@@ -541,14 +542,14 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
 }
 func TestAccSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 wsc := databricks.Must(databricks.NewWorkspaceClient())
 ctx := context.Background()
 me, err := wsc.CurrentUser.Me(ctx)
 require.NoError(t, err)
-remotePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName("ensure-path-exists-test-"))
+remotePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName("ensure-path-exists-test-"))
 err = sync.EnsureRemotePathIsUsable(ctx, wsc, remotePath, me)
 assert.NoError(t, err)

View File

@@ -28,7 +28,7 @@ func testTags(t *testing.T, tags map[string]string) error {
 ctx := context.Background()
 resp, err := w.Jobs.Create(ctx, jobs.CreateJob{
-Name: RandomName("test-tags-"),
+Name: testutil.RandomName("test-tags-"),
 Tasks: []jobs.Task{
 {
 TaskKey: "test",

View File

@@ -39,15 +39,6 @@ func CleanupEnvironment(t *testing.T) {
 }
 }
-// GetEnvOrSkipTest proceeds with test only with that env variable
-func GetEnvOrSkipTest(t *testing.T, name string) string {
-value := os.Getenv(name)
-if value == "" {
-t.Skipf("Environment variable %s is missing", name)
-}
-return value
-}
 // Changes into specified directory for the duration of the test.
 // Returns the current working directory.
 func Chdir(t *testing.T, dir string) string {

View File

@@ -31,8 +31,8 @@ func Touch(t *testing.T, elems ...string) string {
 return path
 }
-func WriteFile(t *testing.T, content string, elems ...string) string {
-path := filepath.Join(elems...)
+// WriteFile writes content to a file.
+func WriteFile(t *testing.T, path, content string) {
 err := os.MkdirAll(filepath.Dir(path), 0o755)
 require.NoError(t, err)
@@ -44,5 +44,12 @@ func WriteFile(t *testing.T, content string, elems ...string) string {
 err = f.Close()
 require.NoError(t, err)
-return path
 }
+
+// ReadFile reads a file and returns its content as a string.
+func ReadFile(t require.TestingT, path string) string {
+b, err := os.ReadFile(path)
+require.NoError(t, err)
+return string(b)
+}
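
A quick round-trip through the new helpers (a hypothetical test, not part of this change), showing that `WriteFile` creates intermediate directories so `ReadFile` can read the result back:

```go
package example_test

import (
	"path/filepath"
	"testing"

	"github.com/databricks/cli/internal/testutil"
	"github.com/stretchr/testify/require"
)

func TestTestutilRoundTrip(t *testing.T) {
	// RandomName gives the intermediate directory a unique, prefixed name.
	path := filepath.Join(t.TempDir(), testutil.RandomName("conf-"), "databricks.yml")
	testutil.WriteFile(t, path, "bundle:\n  name: example\n")
	require.Equal(t, "bundle:\n  name: example\n", testutil.ReadFile(t, path))
}
```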

View File

@@ -1,4 +1,4 @@
-package acc
+package testutil
 import (
 "fmt"

View File

@@ -12,6 +12,7 @@ import (
 "testing"
 "github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/filer"
 "github.com/databricks/databricks-sdk-go"
 "github.com/databricks/databricks-sdk-go/service/workspace"
@@ -20,7 +21,7 @@ import (
 )
 func TestAccWorkspaceList(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 stdout, stderr := RequireSuccessfulRun(t, "workspace", "list", "/")
 outStr := stdout.String()
@@ -42,7 +43,7 @@ func TestWorkpaceGetStatusErrorWhenNoArguments(t *testing.T) {
 }
 func TestAccWorkpaceExportPrintsContents(t *testing.T) {
-t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
 ctx := context.Background()
 w := databricks.Must(databricks.NewWorkspaceClient())