Remove calls to `testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")` (#2014)

## Changes

These calls are no longer necessary now that integration tests use a
main function that performs this check. This change updates integration
tests that call this function. Of those, the call sites that initialize
a workspace client are updated to use `acc.WorkspaceTest(t)` to get one.

## Tests

n/a
This commit is contained in: (branch list omitted in this extract)
Author: Pieter Noordhuis, 2024-12-13 17:09:51 +01:00 (committed by GitHub)
Parent commit: 58dfa70e50
Commit: 3b00d7861e
Signature: no known key found for this signature in database
GPG Key ID: B5690EEEBB952194
21 changed files with 39 additions and 131 deletions

View File

@ -17,9 +17,6 @@ import (
)
func TestBindJobToExistingJob(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: wt, w: wt.W}
@ -82,9 +79,6 @@ func TestBindJobToExistingJob(t *testing.T) {
}
func TestAbortBind(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: wt, w: wt.W}
@ -131,9 +125,6 @@ func TestAbortBind(t *testing.T) {
}
func TestGenerateAndBind(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: wt, w: wt.W}

View File

@ -14,9 +14,6 @@ import (
)
func TestFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W

View File

@ -11,17 +11,15 @@ import (
"testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/iamutil"
"github.com/databricks/databricks-sdk-go"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestBundleInitErrorOnUnknownFields(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
_, _, err := testcli.RequireErrorRun(t, "bundle", "init", "./testdata/init/field-does-not-exist", "--output-dir", tmpDir)
assert.EqualError(t, err, "failed to compute file content for bar.tmpl. variable \"does_not_exist\" not defined")
@ -40,15 +38,14 @@ func TestBundleInitErrorOnUnknownFields(t *testing.T) {
// make changes that can break the MLOps Stacks DAB. In which case we should
// skip this test until the MLOps Stacks DAB is updated to work again.
func TestBundleInitOnMlopsStacks(t *testing.T) {
env := testutil.GetCloud(t).String()
_, wt := acc.WorkspaceTest(t)
w := wt.W
tmpDir1 := t.TempDir()
tmpDir2 := t.TempDir()
w, err := databricks.NewWorkspaceClient(&databricks.Config{})
require.NoError(t, err)
projectName := testutil.RandomName("project_name_")
env := testutil.GetCloud(t).String()
// Create a config file with the project name and root dir
initConfig := map[string]string{
@ -102,23 +99,22 @@ func TestBundleInitOnMlopsStacks(t *testing.T) {
}
func TestBundleInitHelpers(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
w, err := databricks.NewWorkspaceClient(&databricks.Config{})
require.NoError(t, err)
me, err := w.CurrentUser.Me(context.Background())
me, err := w.CurrentUser.Me(ctx)
require.NoError(t, err)
var smallestNode string
switch env {
case "azure":
switch testutil.GetCloud(t) {
case testutil.Azure:
smallestNode = "Standard_D3_v2"
case "gcp":
case testutil.GCP:
smallestNode = "n1-standard-4"
default:
case testutil.AWS:
smallestNode = "i3.xlarge"
default:
t.Fatal("Unknown cloud environment")
}
tests := []struct {

View File

@ -52,7 +52,6 @@ func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) {
}
func TestSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
testutil.RequireJDK(t, context.Background(), "1.8.0")
// Failure on earlier DBR versions:
@ -76,7 +75,6 @@ func TestSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
}
func TestSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
testutil.RequireJDK(t, context.Background(), "1.8.0")
// Failure on earlier DBR versions:

View File

@ -14,8 +14,6 @@ import (
)
func TestBundleValidate(t *testing.T) {
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
tmpDir := t.TempDir()
testutil.WriteFile(t, filepath.Join(tmpDir, "databricks.yml"),
`

View File

@ -4,13 +4,10 @@ import (
"testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert"
)
func TestAlertsCreateErrWhenNoArguments(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := testcli.RequireErrorRun(t, "alerts-legacy", "create")
assert.Equal(t, "please provide command input in JSON format by specifying the --json flag", err.Error())
}

View File

@ -16,8 +16,6 @@ import (
)
func TestApiGet(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := testcli.RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me")
// Deserialize SCIM API response.
@ -31,9 +29,7 @@ func TestApiGet(t *testing.T) {
}
func TestApiPost(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
if env == "gcp" {
if testutil.GetCloud(t) == testutil.GCP {
t.Skip("DBFS REST API is disabled on gcp")
}

View File

@ -6,14 +6,11 @@ import (
"testing"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go"
"github.com/stretchr/testify/require"
)
func TestAuthDescribeSuccess(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
t.Skipf("Skipping because of https://github.com/databricks/cli/issues/2010")
stdout, _ := testcli.RequireSuccessfulRun(t, "auth", "describe")
@ -35,8 +32,6 @@ func TestAuthDescribeSuccess(t *testing.T) {
}
func TestAuthDescribeFailure(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
t.Skipf("Skipping because of https://github.com/databricks/cli/issues/2010")
stdout, _ := testcli.RequireSuccessfulRun(t, "auth", "describe", "--profile", "nonexistent")

View File

@ -7,7 +7,6 @@ import (
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/listing"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/stretchr/testify/assert"
@ -15,8 +14,6 @@ import (
)
func TestClustersList(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := testcli.RequireSuccessfulRun(t, "clusters", "list")
outStr := stdout.String()
assert.Contains(t, outStr, "ID")
@ -30,8 +27,6 @@ func TestClustersList(t *testing.T) {
}
func TestClustersGet(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
clusterId := findValidClusterID(t)
stdout, stderr := testcli.RequireSuccessfulRun(t, "clusters", "get", clusterId)
outStr := stdout.String()

View File

@ -9,7 +9,6 @@ import (
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -72,8 +71,6 @@ func TestFsCatOnNonExistentFile(t *testing.T) {
}
func TestFsCatForDbfsInvalidScheme(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := testcli.RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file")
assert.ErrorContains(t, err, "invalid scheme: dab")
}

View File

@ -357,8 +357,6 @@ func TestFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
}
func TestFsCpErrorsOnInvalidScheme(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := testcli.RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b")
assert.Equal(t, "invalid scheme: https", err.Error())
}

View File

@ -13,8 +13,6 @@ import (
)
func setupLocalFiler(t testutil.TestingT) (filer.Filer, string) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmp := t.TempDir()
f, err := filer.NewLocalClient(tmp)
require.NoError(t, err)

View File

@ -168,8 +168,6 @@ func TestFsLsForNonexistingDir(t *testing.T) {
func TestFsLsWithoutScheme(t *testing.T) {
t.Parallel()
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := testcli.RequireErrorRun(t, "fs", "ls", "/path-without-a-dbfs-scheme", "--output=json")
assert.ErrorIs(t, err, fs.ErrNotExist)
}

View File

@ -5,7 +5,6 @@ import (
"fmt"
"testing"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert"
@ -13,11 +12,7 @@ import (
)
func TestCreateJob(t *testing.T) {
acc.WorkspaceTest(t)
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
if env != "azure" {
t.Skipf("Not running test on cloud %s", env)
}
testutil.Require(t, testutil.Azure)
stdout, stderr := testcli.RequireSuccessfulRun(t, "jobs", "create", "--json", "@testdata/create_job_without_workers.json", "--log-level=debug")
assert.Empty(t, stderr.String())
var output map[string]int

View File

@ -6,6 +6,7 @@ import (
"strconv"
"testing"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go"
@ -48,11 +49,8 @@ func createTemporaryRepo(t *testing.T, w *databricks.WorkspaceClient, ctx contex
}
func TestReposCreateWithProvider(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
repoPath := synthesizeTemporaryRepoPath(t, w, ctx)
_, stderr := testcli.RequireSuccessfulRun(t, "repos", "create", repoUrl, "gitHub", "--path", repoPath)
@ -65,11 +63,8 @@ func TestReposCreateWithProvider(t *testing.T) {
}
func TestReposCreateWithoutProvider(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
repoPath := synthesizeTemporaryRepoPath(t, w, ctx)
_, stderr := testcli.RequireSuccessfulRun(t, "repos", "create", repoUrl, "--path", repoPath)
@ -82,11 +77,8 @@ func TestReposCreateWithoutProvider(t *testing.T) {
}
func TestReposGet(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
repoId, repoPath := createTemporaryRepo(t, w, ctx)
@ -102,7 +94,7 @@ func TestReposGet(t *testing.T) {
assert.Equal(t, byIdOutput.String(), byPathOutput.String())
// Get by path fails
_, stderr, err = testcli.RequireErrorRun(t, "repos", "get", repoPath+"-doesntexist", "--output=json")
_, stderr, err := testcli.RequireErrorRun(t, "repos", "get", repoPath+"-doesntexist", "--output=json")
assert.ErrorContains(t, err, "failed to look up repo")
// Get by path resolves to something other than a repo
@ -111,11 +103,8 @@ func TestReposGet(t *testing.T) {
}
func TestReposUpdate(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
repoId, repoPath := createTemporaryRepo(t, w, ctx)
@ -132,11 +121,8 @@ func TestReposUpdate(t *testing.T) {
}
func TestReposDeleteByID(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
repoId, _ := createTemporaryRepo(t, w, ctx)
@ -146,16 +132,13 @@ func TestReposDeleteByID(t *testing.T) {
assert.Equal(t, "", stderr.String())
// Check it was actually deleted
_, err = w.Repos.GetByRepoId(ctx, repoId)
_, err := w.Repos.GetByRepoId(ctx, repoId)
assert.True(t, apierr.IsMissing(err), err)
}
func TestReposDeleteByPath(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
repoId, repoPath := createTemporaryRepo(t, w, ctx)
@ -165,6 +148,6 @@ func TestReposDeleteByPath(t *testing.T) {
assert.Equal(t, "", stderr.String())
// Check it was actually deleted
_, err = w.Repos.GetByRepoId(ctx, repoId)
_, err := w.Repos.GetByRepoId(ctx, repoId)
assert.True(t, apierr.IsMissing(err), err)
}

View File

@ -501,10 +501,8 @@ func TestSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
}
func TestSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient())
ctx := context.Background()
ctx, wt := acc.WorkspaceTest(t)
wsc := wt.W
me, err := wsc.CurrentUser.Me(ctx)
require.NoError(t, err)
@ -521,10 +519,9 @@ func TestSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
}
func TestSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx, wt := acc.WorkspaceTest(t)
wsc := wt.W
wsc := databricks.Must(databricks.NewWorkspaceClient())
ctx := context.Background()
_, remoteRepoPath := setupRepo(t, wsc, ctx)
// Repo itself is usable.
@ -543,10 +540,9 @@ func TestSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
}
func TestSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx, wt := acc.WorkspaceTest(t)
wsc := wt.W
wsc := databricks.Must(databricks.NewWorkspaceClient())
ctx := context.Background()
me, err := wsc.CurrentUser.Me(ctx)
require.NoError(t, err)

View File

@ -13,7 +13,6 @@ import (
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/assert"
@ -21,8 +20,6 @@ import (
)
func TestWorkspaceList(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := testcli.RequireSuccessfulRun(t, "workspace", "list", "/")
outStr := stdout.String()
assert.Contains(t, outStr, "ID")

View File

@ -16,8 +16,6 @@ import (
)
func setupLocalFiler(t testutil.TestingT) (filer.Filer, string) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmp := t.TempDir()
f, err := filer.NewLocalClient(tmp)
require.NoError(t, err)

View File

@ -6,14 +6,11 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/git"
"github.com/stretchr/testify/assert"
)
func TestGitClone(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
ctx := context.Background()
var err error
@ -34,8 +31,6 @@ func TestGitClone(t *testing.T) {
}
func TestGitCloneOnNonDefaultBranch(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
ctx := context.Background()
var err error
@ -55,8 +50,6 @@ func TestGitCloneOnNonDefaultBranch(t *testing.T) {
}
func TestGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
err := git.Clone(context.Background(), "https://github.com/monalisa/doesnot-exist.git", "", tmpDir)

View File

@ -46,10 +46,8 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr
}
func TestLock(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.TODO()
wsc, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
ctx, wt := acc.WorkspaceTest(t)
wsc := wt.W
remoteProjectRoot := createRemoteTestProject(t, "lock-acc-", wsc)
// 5 lockers try to acquire a lock at the same time

View File

@ -78,7 +78,6 @@ var sparkVersions = []string{
func TestRunPythonTaskWorkspace(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
unsupportedSparkVersionsForWheel := []string{
"11.3.x-scala2.12",
@ -99,7 +98,6 @@ func TestRunPythonTaskWorkspace(t *testing.T) {
func TestRunPythonTaskDBFS(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
runPythonTasks(t, prepareDBFSFiles(t), testOpts{
name: "Python tasks from DBFS",
@ -112,7 +110,6 @@ func TestRunPythonTaskDBFS(t *testing.T) {
func TestRunPythonTaskRepo(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
runPythonTasks(t, prepareRepoFiles(t), testOpts{
name: "Python tasks from Repo",
@ -123,9 +120,6 @@ func TestRunPythonTaskRepo(t *testing.T) {
}
func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
w := tw.w
nodeTypeId := testutil.GetCloud(t).NodeTypeID()