Remove superfluous name prefix for integration tests (#2012)

## Changes

Mechanical rename of the "TestAcc" prefix to "Test" in integration test function names.
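
For reference, a rename like this can be scripted. The sketch below is illustrative only — it assumes it is run from the repository root and is not necessarily how this commit was produced — and rewrites the `func TestAcc` prefix to `func Test` across all `*_test.go` files:

```go
// rename_testacc.go — minimal sketch of the mechanical rename (illustrative only).
package main

import (
	"log"
	"os"
	"path/filepath"
	"regexp"
	"strings"
)

func main() {
	// Match the old prefix only where a test function is declared.
	re := regexp.MustCompile(`\bfunc TestAcc`)

	err := filepath.WalkDir(".", func(path string, d os.DirEntry, err error) error {
		if err != nil || d.IsDir() || !strings.HasSuffix(path, "_test.go") {
			return err
		}
		b, err := os.ReadFile(path)
		if err != nil {
			return err
		}
		out := re.ReplaceAll(b, []byte("func Test"))
		if string(out) == string(b) {
			return nil // nothing to rewrite in this file
		}
		return os.WriteFile(path, out, 0o644)
	})
	if err != nil {
		log.Fatal(err)
	}
}
```

In practice something like a `sed` one-liner over the `*_test.go` files accomplishes the same; the Go version is shown only to stay in the repository's own language.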

## Tests

n/a
Pieter Noordhuis 2024-12-13 15:47:50 +01:00 committed by GitHub
parent c958702097
commit 4e95cb226c
42 changed files with 145 additions and 145 deletions


@ -19,7 +19,7 @@ import (
// Verify that importing a dashboard through the Workspace API retains the identity of the underlying resource,
// as well as properties exclusively accessible through the dashboards API.
func TestAccDashboardAssumptions_WorkspaceImport(t *testing.T) {
func TestDashboardAssumptions_WorkspaceImport(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
t.Parallel()


@ -31,7 +31,7 @@ func touchEmptyFile(t *testing.T, path string) {
f.Close()
}
func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
dir := t.TempDir()
whlPath := filepath.Join(dir, "dist", "test.whl")
@ -95,7 +95,7 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
)
}
func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) {
func TestUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
dir := t.TempDir()
whlPath := filepath.Join(dir, "dist", "test.whl")
@ -159,7 +159,7 @@ func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T)
)
}
func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
func TestUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
if os.Getenv("TEST_METASTORE_ID") == "" {
@ -228,7 +228,7 @@ func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
)
}
func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
func TestUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
@ -265,7 +265,7 @@ func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
assert.Equal(t, "", stderr.String())
}
func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
func TestUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W


@ -11,7 +11,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {
func TestBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {
ctx, _ := acc.WorkspaceTest(t)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()


@ -16,7 +16,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccBindJobToExistingJob(t *testing.T) {
func TestBindJobToExistingJob(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
@ -81,7 +81,7 @@ func TestAccBindJobToExistingJob(t *testing.T) {
require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py")
}
func TestAccAbortBind(t *testing.T) {
func TestAbortBind(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
@ -130,7 +130,7 @@ func TestAccAbortBind(t *testing.T) {
require.Contains(t, job.Settings.Tasks[0].NotebookTask.NotebookPath, "test")
}
func TestAccGenerateAndBind(t *testing.T) {
func TestGenerateAndBind(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)


@ -11,7 +11,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccDeployBundleWithCluster(t *testing.T) {
func TestDeployBundleWithCluster(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
if testutil.IsAWSCloud(wt) {


@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccDashboards(t *testing.T) {
func TestDashboards(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
warehouseID := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")


@ -81,7 +81,7 @@ func setupUcSchemaBundle(t *testing.T, ctx context.Context, w *databricks.Worksp
return bundleRoot
}
func TestAccBundleDeployUcSchema(t *testing.T) {
func TestBundleDeployUcSchema(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
@ -106,7 +106,7 @@ func TestAccBundleDeployUcSchema(t *testing.T) {
assert.Equal(t, "SCHEMA_DOES_NOT_EXIST", apiErr.ErrorCode)
}
func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
func TestBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
@ -128,7 +128,7 @@ func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
}
func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
func TestBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
@ -176,7 +176,7 @@ properties such as the 'catalog' or 'storage' are changed:
assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
}
func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
func TestBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
uniqueId := uuid.New().String()
@ -215,7 +215,7 @@ properties such as the 'catalog' or 'storage' are changed:
assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
}
func TestAccDeployBasicBundleLogs(t *testing.T) {
func TestDeployBasicBundleLogs(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
@ -245,7 +245,7 @@ func TestAccDeployBasicBundleLogs(t *testing.T) {
assert.Equal(t, "", stdout)
}
func TestAccDeployUcVolume(t *testing.T) {
func TestDeployUcVolume(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W


@ -12,7 +12,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccBundleDeployThenRemoveResources(t *testing.T) {
func TestBundleDeployThenRemoveResources(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W


@ -10,7 +10,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccDeployBasicToSharedWorkspacePath(t *testing.T) {
func TestDeployBasicToSharedWorkspacePath(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()


@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) {
func TestFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)


@ -14,7 +14,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccBundleDestroy(t *testing.T) {
func TestBundleDestroy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W


@ -11,7 +11,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccEmptyBundleDeploy(t *testing.T) {
func TestEmptyBundleDeploy(t *testing.T) {
ctx, _ := acc.WorkspaceTest(t)
// create empty bundle


@ -8,7 +8,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccPythonWheelTaskWithEnvironmentsDeployAndRun(t *testing.T) {
func TestPythonWheelTaskWithEnvironmentsDeployAndRun(t *testing.T) {
t.Skip("Skipping test until serverless is enabled")
ctx, _ := acc.WorkspaceTest(t)


@ -20,7 +20,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccGenerateFromExistingJobAndDeploy(t *testing.T) {
func TestGenerateFromExistingJobAndDeploy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: wt, w: wt.W}


@ -19,7 +19,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) {
func TestGenerateFromExistingPipelineAndDeploy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
gt := &generatePipelineTest{T: wt, w: wt.W}


@ -19,7 +19,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccBundleInitErrorOnUnknownFields(t *testing.T) {
func TestBundleInitErrorOnUnknownFields(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
@ -39,7 +39,7 @@ func TestAccBundleInitErrorOnUnknownFields(t *testing.T) {
// 2. While rare and to be avoided if possible, the CLI reserves the right to
// make changes that can break the MLOps Stacks DAB. In which case we should
// skip this test until the MLOps Stacks DAB is updated to work again.
func TestAccBundleInitOnMlopsStacks(t *testing.T) {
func TestBundleInitOnMlopsStacks(t *testing.T) {
env := testutil.GetCloud(t).String()
tmpDir1 := t.TempDir()
@ -101,7 +101,7 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) {
assert.Contains(t, job.Settings.Name, fmt.Sprintf("dev-%s-batch-inference-job", projectName))
}
func TestAccBundleInitHelpers(t *testing.T) {
func TestBundleInitHelpers(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)


@ -18,7 +18,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccJobsMetadataFile(t *testing.T) {
func TestJobsMetadataFile(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W


@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccLocalStateStaleness(t *testing.T) {
func TestLocalStateStaleness(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W


@ -45,15 +45,15 @@ func runPythonWheelTest(t *testing.T, templateName, sparkVersion string, pythonW
require.Contains(t, out, "['my_test_code', 'param1', 'param2']")
}
func TestAccPythonWheelTaskDeployAndRunWithoutWrapper(t *testing.T) {
func TestPythonWheelTaskDeployAndRunWithoutWrapper(t *testing.T) {
runPythonWheelTest(t, "python_wheel_task", "13.3.x-snapshot-scala2.12", false)
}
func TestAccPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
func TestPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
runPythonWheelTest(t, "python_wheel_task", "12.2.x-scala2.12", true)
}
func TestAccPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
func TestPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
_, wt := acc.WorkspaceTest(t)
if testutil.IsAWSCloud(wt) {


@ -51,7 +51,7 @@ func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) {
runSparkJarTestCommon(t, ctx, sparkVersion, "n/a")
}
func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
func TestSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
testutil.RequireJDK(t, context.Background(), "1.8.0")
@ -75,7 +75,7 @@ func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
}
}
func TestAccSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
func TestSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
testutil.RequireJDK(t, context.Background(), "1.8.0")


@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccBundleValidate(t *testing.T) {
func TestBundleValidate(t *testing.T) {
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
tmpDir := t.TempDir()


@ -8,7 +8,7 @@ import (
"github.com/stretchr/testify/assert"
)
func TestAccAlertsCreateErrWhenNoArguments(t *testing.T) {
func TestAlertsCreateErrWhenNoArguments(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := testcli.RequireErrorRun(t, "alerts-legacy", "create")


@ -15,7 +15,7 @@ import (
"github.com/databricks/cli/internal/testutil"
)
func TestAccApiGet(t *testing.T) {
func TestApiGet(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, _ := testcli.RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me")
@ -30,7 +30,7 @@ func TestAccApiGet(t *testing.T) {
assert.NotNil(t, out["id"])
}
func TestAccApiPost(t *testing.T) {
func TestApiPost(t *testing.T) {
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
t.Log(env)
if env == "gcp" {


@ -14,7 +14,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccClustersList(t *testing.T) {
func TestClustersList(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := testcli.RequireSuccessfulRun(t, "clusters", "list")
@ -29,7 +29,7 @@ func TestAccClustersList(t *testing.T) {
assert.NotEmpty(t, clusterId)
}
func TestAccClustersGet(t *testing.T) {
func TestClustersGet(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
clusterId := findValidClusterID(t)


@ -15,7 +15,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccFsCat(t *testing.T) {
func TestFsCat(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -35,7 +35,7 @@ func TestAccFsCat(t *testing.T) {
}
}
func TestAccFsCatOnADir(t *testing.T) {
func TestFsCatOnADir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -54,7 +54,7 @@ func TestAccFsCatOnADir(t *testing.T) {
}
}
func TestAccFsCatOnNonExistentFile(t *testing.T) {
func TestFsCatOnNonExistentFile(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -71,14 +71,14 @@ func TestAccFsCatOnNonExistentFile(t *testing.T) {
}
}
func TestAccFsCatForDbfsInvalidScheme(t *testing.T) {
func TestFsCatForDbfsInvalidScheme(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := testcli.RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file")
assert.ErrorContains(t, err, "invalid scheme: dab")
}
func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) {
func TestFsCatDoesNotSupportOutputModeJson(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W


@ -18,7 +18,7 @@ func setupCompletionFile(t *testing.T, f filer.Filer) {
require.NoError(t, err)
}
func TestAccFsCompletion(t *testing.T) {
func TestFsCompletion(t *testing.T) {
f, tmpDir := setupDbfsFiler(t)
setupCompletionFile(t, f)


@ -122,7 +122,7 @@ func copyTests() []cpTest {
}
}
func TestAccFsCpDir(t *testing.T) {
func TestFsCpDir(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -142,7 +142,7 @@ func TestAccFsCpDir(t *testing.T) {
}
}
func TestAccFsCpFileToFile(t *testing.T) {
func TestFsCpFileToFile(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -162,7 +162,7 @@ func TestAccFsCpFileToFile(t *testing.T) {
}
}
func TestAccFsCpFileToDir(t *testing.T) {
func TestFsCpFileToDir(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -182,7 +182,7 @@ func TestAccFsCpFileToDir(t *testing.T) {
}
}
func TestAccFsCpFileToDirForWindowsPaths(t *testing.T) {
func TestFsCpFileToDirForWindowsPaths(t *testing.T) {
if runtime.GOOS != "windows" {
t.Skip("Skipping test on non-windows OS")
}
@ -198,7 +198,7 @@ func TestAccFsCpFileToDirForWindowsPaths(t *testing.T) {
assertTargetFile(t, ctx, targetFiler, "foo.txt")
}
func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) {
func TestFsCpDirToDirFileNotOverwritten(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -223,7 +223,7 @@ func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) {
}
}
func TestAccFsCpFileToDirFileNotOverwritten(t *testing.T) {
func TestFsCpFileToDirFileNotOverwritten(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -246,7 +246,7 @@ func TestAccFsCpFileToDirFileNotOverwritten(t *testing.T) {
}
}
func TestAccFsCpFileToFileFileNotOverwritten(t *testing.T) {
func TestFsCpFileToFileFileNotOverwritten(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -269,7 +269,7 @@ func TestAccFsCpFileToFileFileNotOverwritten(t *testing.T) {
}
}
func TestAccFsCpDirToDirWithOverwriteFlag(t *testing.T) {
func TestFsCpDirToDirWithOverwriteFlag(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -292,7 +292,7 @@ func TestAccFsCpDirToDirWithOverwriteFlag(t *testing.T) {
}
}
func TestAccFsCpFileToFileWithOverwriteFlag(t *testing.T) {
func TestFsCpFileToFileWithOverwriteFlag(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -315,7 +315,7 @@ func TestAccFsCpFileToFileWithOverwriteFlag(t *testing.T) {
}
}
func TestAccFsCpFileToDirWithOverwriteFlag(t *testing.T) {
func TestFsCpFileToDirWithOverwriteFlag(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {
@ -338,7 +338,7 @@ func TestAccFsCpFileToDirWithOverwriteFlag(t *testing.T) {
}
}
func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
func TestFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -356,14 +356,14 @@ func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) {
}
}
func TestAccFsCpErrorsOnInvalidScheme(t *testing.T) {
func TestFsCpErrorsOnInvalidScheme(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, _, err := testcli.RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b")
assert.Equal(t, "invalid scheme: https", err.Error())
}
func TestAccFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) {
func TestFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) {
t.Parallel()
for _, testCase := range copyTests() {


@ -40,7 +40,7 @@ func setupLsFiles(t *testing.T, f filer.Filer) {
require.NoError(t, err)
}
func TestAccFsLs(t *testing.T) {
func TestFsLs(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -73,7 +73,7 @@ func TestAccFsLs(t *testing.T) {
}
}
func TestAccFsLsWithAbsolutePaths(t *testing.T) {
func TestFsLsWithAbsolutePaths(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -106,7 +106,7 @@ func TestAccFsLsWithAbsolutePaths(t *testing.T) {
}
}
func TestAccFsLsOnFile(t *testing.T) {
func TestFsLsOnFile(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -124,7 +124,7 @@ func TestAccFsLsOnFile(t *testing.T) {
}
}
func TestAccFsLsOnEmptyDir(t *testing.T) {
func TestFsLsOnEmptyDir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -147,7 +147,7 @@ func TestAccFsLsOnEmptyDir(t *testing.T) {
}
}
func TestAccFsLsForNonexistingDir(t *testing.T) {
func TestFsLsForNonexistingDir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -165,7 +165,7 @@ func TestAccFsLsForNonexistingDir(t *testing.T) {
}
}
func TestAccFsLsWithoutScheme(t *testing.T) {
func TestFsLsWithoutScheme(t *testing.T) {
t.Parallel()
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))


@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccFsMkdir(t *testing.T) {
func TestFsMkdir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -38,7 +38,7 @@ func TestAccFsMkdir(t *testing.T) {
}
}
func TestAccFsMkdirCreatesIntermediateDirectories(t *testing.T) {
func TestFsMkdirCreatesIntermediateDirectories(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -75,7 +75,7 @@ func TestAccFsMkdirCreatesIntermediateDirectories(t *testing.T) {
}
}
func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) {
func TestFsMkdirWhenDirectoryAlreadyExists(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -98,7 +98,7 @@ func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) {
}
}
func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) {
func TestFsMkdirWhenFileExistsAtPath(t *testing.T) {
t.Parallel()
t.Run("dbfs", func(t *testing.T) {


@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccFsRmFile(t *testing.T) {
func TestFsRmFile(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -43,7 +43,7 @@ func TestAccFsRmFile(t *testing.T) {
}
}
func TestAccFsRmEmptyDir(t *testing.T) {
func TestFsRmEmptyDir(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -73,7 +73,7 @@ func TestAccFsRmEmptyDir(t *testing.T) {
}
}
func TestAccFsRmNonEmptyDirectory(t *testing.T) {
func TestFsRmNonEmptyDirectory(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -103,7 +103,7 @@ func TestAccFsRmNonEmptyDirectory(t *testing.T) {
}
}
func TestAccFsRmForNonExistentFile(t *testing.T) {
func TestFsRmForNonExistentFile(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {
@ -121,7 +121,7 @@ func TestAccFsRmForNonExistentFile(t *testing.T) {
}
}
func TestAccFsRmDirRecursively(t *testing.T) {
func TestFsRmDirRecursively(t *testing.T) {
t.Parallel()
for _, testCase := range fsTests {


@ -12,7 +12,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccCreateJob(t *testing.T) {
func TestCreateJob(t *testing.T) {
acc.WorkspaceTest(t)
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
if env != "azure" {


@ -47,7 +47,7 @@ func createTemporaryRepo(t *testing.T, w *databricks.WorkspaceClient, ctx contex
return repoInfo.Id, repoPath
}
func TestAccReposCreateWithProvider(t *testing.T) {
func TestReposCreateWithProvider(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@ -64,7 +64,7 @@ func TestAccReposCreateWithProvider(t *testing.T) {
assert.Equal(t, workspace.ObjectTypeRepo, oi.ObjectType)
}
func TestAccReposCreateWithoutProvider(t *testing.T) {
func TestReposCreateWithoutProvider(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@ -81,7 +81,7 @@ func TestAccReposCreateWithoutProvider(t *testing.T) {
assert.Equal(t, workspace.ObjectTypeRepo, oi.ObjectType)
}
func TestAccReposGet(t *testing.T) {
func TestReposGet(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@ -110,7 +110,7 @@ func TestAccReposGet(t *testing.T) {
assert.ErrorContains(t, err, "is not a repo")
}
func TestAccReposUpdate(t *testing.T) {
func TestReposUpdate(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@ -131,7 +131,7 @@ func TestAccReposUpdate(t *testing.T) {
assert.Equal(t, byIdOutput.String(), byPathOutput.String())
}
func TestAccReposDeleteByID(t *testing.T) {
func TestReposDeleteByID(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
@ -150,7 +150,7 @@ func TestAccReposDeleteByID(t *testing.T) {
assert.True(t, apierr.IsMissing(err), err)
}
func TestAccReposDeleteByPath(t *testing.T) {
func TestReposDeleteByPath(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()


@ -63,7 +63,7 @@ func assertSecretBytesValue(t *acc.WorkspaceT, scope, key string, expected []byt
assert.Equal(t, expected, decoded)
}
func TestAccSecretsPutSecretStringValue(tt *testing.T) {
func TestSecretsPutSecretStringValue(tt *testing.T) {
ctx, t := acc.WorkspaceTest(tt)
scope := temporarySecretScope(ctx, t)
key := "test-key"
@ -77,7 +77,7 @@ func TestAccSecretsPutSecretStringValue(tt *testing.T) {
assertSecretBytesValue(t, scope, key, []byte(value))
}
func TestAccSecretsPutSecretBytesValue(tt *testing.T) {
func TestSecretsPutSecretBytesValue(tt *testing.T) {
ctx, t := acc.WorkspaceTest(tt)
scope := temporarySecretScope(ctx, t)
key := "test-key"


@ -9,7 +9,7 @@ import (
"github.com/stretchr/testify/assert"
)
func TestAccStorageCredentialsListRendersResponse(t *testing.T) {
func TestStorageCredentialsListRendersResponse(t *testing.T) {
_, _ = acc.WorkspaceTest(t)
// Check if metastore is assigned for the workspace, otherwise test will fail


@ -230,7 +230,7 @@ func (a *syncTest) snapshotContains(files []string) {
assert.Equal(a.t, len(files), len(s.LastModifiedTimes))
}
func TestAccSyncFullFileSync(t *testing.T) {
func TestSyncFullFileSync(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--full", "--watch")
@ -262,7 +262,7 @@ func TestAccSyncFullFileSync(t *testing.T) {
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore"))
}
func TestAccSyncIncrementalFileSync(t *testing.T) {
func TestSyncIncrementalFileSync(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@ -296,7 +296,7 @@ func TestAccSyncIncrementalFileSync(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore"))
}
func TestAccSyncNestedFolderSync(t *testing.T) {
func TestSyncNestedFolderSync(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@ -324,7 +324,7 @@ func TestAccSyncNestedFolderSync(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore"))
}
func TestAccSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) {
func TestSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@ -357,7 +357,7 @@ func TestAccSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) {
assertSync.remoteExists(ctx, "dir1")
}
func TestAccSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) {
func TestSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@ -393,7 +393,7 @@ func TestAccSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) {
//
// In the above scenario sync should delete the empty folder and add foo to the remote
// file system
func TestAccSyncIncrementalFileOverwritesFolder(t *testing.T) {
func TestSyncIncrementalFileOverwritesFolder(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@ -423,7 +423,7 @@ func TestAccSyncIncrementalFileOverwritesFolder(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo"))
}
func TestAccSyncIncrementalSyncPythonNotebookToFile(t *testing.T) {
func TestSyncIncrementalSyncPythonNotebookToFile(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@ -454,7 +454,7 @@ func TestAccSyncIncrementalSyncPythonNotebookToFile(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore"))
}
func TestAccSyncIncrementalSyncFileToPythonNotebook(t *testing.T) {
func TestSyncIncrementalSyncFileToPythonNotebook(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@ -478,7 +478,7 @@ func TestAccSyncIncrementalSyncFileToPythonNotebook(t *testing.T) {
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo.py"))
}
func TestAccSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
func TestSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
ctx := context.Background()
assertSync := setupSyncTest(t, "--watch")
@ -500,7 +500,7 @@ func TestAccSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore"))
}
func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
func TestSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient())
@ -520,7 +520,7 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {
assert.ErrorContains(t, err, " does not exist; please create it first")
}
func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
func TestSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient())
@ -542,7 +542,7 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) {
require.Equal(t, workspace.ObjectTypeDirectory, info.ObjectType)
}
func TestAccSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) {
func TestSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
wsc := databricks.Must(databricks.NewWorkspaceClient())


@ -20,7 +20,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestAccWorkspaceList(t *testing.T) {
func TestWorkspaceList(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
stdout, stderr := testcli.RequireSuccessfulRun(t, "workspace", "list", "/")
@ -42,7 +42,7 @@ func TestWorkpaceGetStatusErrorWhenNoArguments(t *testing.T) {
assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0")
}
func TestAccWorkpaceExportPrintsContents(t *testing.T) {
func TestWorkpaceExportPrintsContents(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
@ -93,7 +93,7 @@ func assertWorkspaceFileType(t *testing.T, ctx context.Context, f filer.Filer, p
assert.Equal(t, fileType, info.Sys().(workspace.ObjectInfo).ObjectType)
}
func TestAccExportDir(t *testing.T) {
func TestExportDir(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
targetDir := t.TempDir()
@ -138,7 +138,7 @@ func TestAccExportDir(t *testing.T) {
assertLocalFileContents(t, filepath.Join(targetDir, "a/b/c/file-b"), "def")
}
func TestAccExportDirDoesNotOverwrite(t *testing.T) {
func TestExportDirDoesNotOverwrite(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
targetDir := t.TempDir()
@ -159,7 +159,7 @@ func TestAccExportDirDoesNotOverwrite(t *testing.T) {
assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "local content")
}
func TestAccExportDirWithOverwriteFlag(t *testing.T) {
func TestExportDirWithOverwriteFlag(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
targetDir := t.TempDir()
@ -180,7 +180,7 @@ func TestAccExportDirWithOverwriteFlag(t *testing.T) {
assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "content from workspace")
}
func TestAccImportDir(t *testing.T) {
func TestImportDir(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
stdout, stderr := testcli.RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--log-level=debug")
@ -209,7 +209,7 @@ func TestAccImportDir(t *testing.T) {
assertFilerFileContents(t, ctx, workspaceFiler, "jupyterNotebook", "# Databricks notebook source\nprint(\"jupyter\")")
}
func TestAccImportDirDoesNotOverwrite(t *testing.T) {
func TestImportDirDoesNotOverwrite(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
var err error
@ -237,7 +237,7 @@ func TestAccImportDirDoesNotOverwrite(t *testing.T) {
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")")
}
func TestAccImportDirWithOverwriteFlag(t *testing.T) {
func TestImportDirWithOverwriteFlag(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
var err error
@ -265,7 +265,7 @@ func TestAccImportDirWithOverwriteFlag(t *testing.T) {
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"python\")")
}
func TestAccExport(t *testing.T) {
func TestExport(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
var err error
@ -294,7 +294,7 @@ func TestAccExport(t *testing.T) {
assert.Contains(t, string(b), `"metadata":`, "jupyter notebooks contain the metadata field")
}
func TestAccExportWithFileFlag(t *testing.T) {
func TestExportWithFileFlag(t *testing.T) {
ctx, f, sourceDir := setupWorkspaceImportExportTest(t)
localTmpDir := t.TempDir()
@ -328,7 +328,7 @@ func TestAccExportWithFileFlag(t *testing.T) {
assertLocalFileContents(t, filepath.Join(localTmpDir, "jupyterNb.ipynb"), `"metadata":`)
}
func TestAccImportFileUsingContentFormatSource(t *testing.T) {
func TestImportFileUsingContentFormatSource(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Content = `print(1)`. Uploaded as a notebook by default
@ -345,7 +345,7 @@ func TestAccImportFileUsingContentFormatSource(t *testing.T) {
assertWorkspaceFileType(t, ctx, workspaceFiler, "pyNb", workspace.ObjectTypeNotebook)
}
func TestAccImportFileUsingContentFormatAuto(t *testing.T) {
func TestImportFileUsingContentFormatAuto(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Content = `# Databricks notebook source\nprint(1)`. Upload as file if path has no extension.
@ -367,7 +367,7 @@ func TestAccImportFileUsingContentFormatAuto(t *testing.T) {
assertWorkspaceFileType(t, ctx, workspaceFiler, "not-a-notebook.py", workspace.ObjectTypeFile)
}
func TestAccImportFileFormatSource(t *testing.T) {
func TestImportFileFormatSource(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
testcli.RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyNotebook"), "--file", "./testdata/import_dir/pyNotebook.py", "--language=PYTHON")
assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"python\")")
@ -381,7 +381,7 @@ func TestAccImportFileFormatSource(t *testing.T) {
assert.ErrorContains(t, err, "The zip file may not be valid or may be an unsupported version. Hint: Objects imported using format=SOURCE are expected to be zip encoded databricks source notebook(s) by default. Please specify a language using the --language flag if you are trying to import a single uncompressed notebook")
}
func TestAccImportFileFormatAuto(t *testing.T) {
func TestImportFileFormatAuto(t *testing.T) {
ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
// Upload as file if path has no extension


@ -117,7 +117,7 @@ func commonFilerRecursiveDeleteTest(t *testing.T, ctx context.Context, f filer.F
assert.ErrorAs(t, err, &filer.NoSuchDirectoryError{})
}
func TestAccFilerRecursiveDelete(t *testing.T) {
func TestFilerRecursiveDelete(t *testing.T) {
t.Parallel()
for _, testCase := range []struct {
@ -228,7 +228,7 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer)
assert.True(t, errors.Is(err, fs.ErrInvalid))
}
func TestAccFilerReadWrite(t *testing.T) {
func TestFilerReadWrite(t *testing.T) {
t.Parallel()
for _, testCase := range []struct {
@ -337,7 +337,7 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) {
assert.False(t, entries[0].IsDir())
}
func TestAccFilerReadDir(t *testing.T) {
func TestFilerReadDir(t *testing.T) {
t.Parallel()
for _, testCase := range []struct {
@ -362,7 +362,7 @@ func TestAccFilerReadDir(t *testing.T) {
}
}
func TestAccFilerWorkspaceNotebook(t *testing.T) {
func TestFilerWorkspaceNotebook(t *testing.T) {
t.Parallel()
ctx := context.Background()
@ -471,7 +471,7 @@ func TestAccFilerWorkspaceNotebook(t *testing.T) {
}
}
func TestAccFilerWorkspaceFilesExtensionsReadDir(t *testing.T) {
func TestFilerWorkspaceFilesExtensionsReadDir(t *testing.T) {
t.Parallel()
files := []struct {
@ -575,7 +575,7 @@ func setupFilerWithExtensionsTest(t *testing.T) filer.Filer {
return wf
}
func TestAccFilerWorkspaceFilesExtensionsRead(t *testing.T) {
func TestFilerWorkspaceFilesExtensionsRead(t *testing.T) {
t.Parallel()
ctx := context.Background()
@ -612,7 +612,7 @@ func TestAccFilerWorkspaceFilesExtensionsRead(t *testing.T) {
assert.ErrorIs(t, err, fs.ErrNotExist)
}
func TestAccFilerWorkspaceFilesExtensionsDelete(t *testing.T) {
func TestFilerWorkspaceFilesExtensionsDelete(t *testing.T) {
t.Parallel()
ctx := context.Background()
@ -661,7 +661,7 @@ func TestAccFilerWorkspaceFilesExtensionsDelete(t *testing.T) {
filerTest{t, wf}.assertNotExists(ctx, "dir")
}
func TestAccFilerWorkspaceFilesExtensionsStat(t *testing.T) {
func TestFilerWorkspaceFilesExtensionsStat(t *testing.T) {
t.Parallel()
ctx := context.Background()
@ -708,7 +708,7 @@ func TestAccFilerWorkspaceFilesExtensionsStat(t *testing.T) {
}
}
func TestAccWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) {
func TestWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) {
t.Parallel()
ctx := context.Background()
@ -723,7 +723,7 @@ func TestAccWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) {
assert.ErrorIs(t, err, fs.ErrNotExist)
}
func TestAccWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) {
func TestWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) {
t.Parallel()
ctx := context.Background()
@ -742,7 +742,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) {
assert.NoError(t, err)
}
func TestAccWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) {
func TestWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) {
t.Parallel()
ctx := context.Background()
@ -761,7 +761,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) {
assert.NoError(t, err)
}
func TestAccWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T) {
func TestWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T) {
t.Parallel()
ctx := context.Background()
@ -780,7 +780,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T)
assert.NoError(t, err)
}
func TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) {
func TestWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) {
t.Parallel()
// Case 1: Writing source notebooks.


@ -11,7 +11,7 @@ import (
"github.com/stretchr/testify/assert"
)
func TestAccGitClone(t *testing.T) {
func TestGitClone(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
@ -33,7 +33,7 @@ func TestAccGitClone(t *testing.T) {
assert.Contains(t, string(b), "ide")
}
func TestAccGitCloneOnNonDefaultBranch(t *testing.T) {
func TestGitCloneOnNonDefaultBranch(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()
@ -54,7 +54,7 @@ func TestAccGitCloneOnNonDefaultBranch(t *testing.T) {
assert.Contains(t, string(b), "dais-2022")
}
func TestAccGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) {
func TestGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
tmpDir := t.TempDir()


@ -39,7 +39,7 @@ func assertSparseGitInfo(t *testing.T, expectedRoot string, info git.RepositoryI
assert.Equal(t, expectedRoot, info.WorktreeRoot)
}
func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
func TestFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
me, err := wt.W.CurrentUser.Me(ctx)
require.NoError(t, err)
@ -66,7 +66,7 @@ func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) {
}
}
func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
func TestFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
me, err := wt.W.CurrentUser.Me(ctx)
require.NoError(t, err)
@ -112,7 +112,7 @@ func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) {
}
}
func TestAccFetchRepositoryInfoDotGit_FromGitRepo(t *testing.T) {
func TestFetchRepositoryInfoDotGit_FromGitRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
repo := cloneRepoLocally(t, examplesRepoUrl)
@ -139,7 +139,7 @@ func cloneRepoLocally(t *testing.T, repoUrl string) string {
return localRoot
}
func TestAccFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) {
func TestFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
tempDir := t.TempDir()
@ -161,7 +161,7 @@ func TestAccFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) {
}
}
func TestAccFetchRepositoryInfoDotGit_FromBrokenGitRepo(t *testing.T) {
func TestFetchRepositoryInfoDotGit_FromBrokenGitRepo(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
tempDir := t.TempDir()


@ -45,7 +45,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr
return remoteProjectRoot
}
func TestAccLock(t *testing.T) {
func TestLock(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.TODO()
wsc, err := databricks.NewWorkspaceClient()
@ -181,7 +181,7 @@ func setupLockerTest(t *testing.T) (context.Context, *lockpkg.Locker, filer.File
return ctx, locker, f
}
func TestAccLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) {
func TestLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) {
ctx, locker, f := setupLockerTest(t)
var err error
@ -202,7 +202,7 @@ func TestAccLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) {
assert.ErrorIs(t, err, fs.ErrNotExist)
}
func TestAccLockUnlockWithAllowsLockFileNotExist(t *testing.T) {
func TestLockUnlockWithAllowsLockFileNotExist(t *testing.T) {
ctx, locker, f := setupLockerTest(t)
var err error


@ -80,7 +80,7 @@ func runTagTestCases(t *testing.T, cases []tagTestCase) {
}
}
func TestAccTagKeyAWS(t *testing.T) {
func TestTagKeyAWS(t *testing.T) {
testutil.Require(t, testutil.AWS)
t.Parallel()
@ -112,7 +112,7 @@ func TestAccTagKeyAWS(t *testing.T) {
})
}
func TestAccTagValueAWS(t *testing.T) {
func TestTagValueAWS(t *testing.T) {
testutil.Require(t, testutil.AWS)
t.Parallel()
@ -138,7 +138,7 @@ func TestAccTagValueAWS(t *testing.T) {
})
}
func TestAccTagKeyAzure(t *testing.T) {
func TestTagKeyAzure(t *testing.T) {
testutil.Require(t, testutil.Azure)
t.Parallel()
@ -170,7 +170,7 @@ func TestAccTagKeyAzure(t *testing.T) {
})
}
func TestAccTagValueAzure(t *testing.T) {
func TestTagValueAzure(t *testing.T) {
testutil.Require(t, testutil.Azure)
t.Parallel()
@ -190,7 +190,7 @@ func TestAccTagValueAzure(t *testing.T) {
})
}
func TestAccTagKeyGCP(t *testing.T) {
func TestTagKeyGCP(t *testing.T) {
testutil.Require(t, testutil.GCP)
t.Parallel()
@ -222,7 +222,7 @@ func TestAccTagKeyGCP(t *testing.T) {
})
}
func TestAccTagValueGCP(t *testing.T) {
func TestTagValueGCP(t *testing.T) {
testutil.Require(t, testutil.GCP)
t.Parallel()


@ -75,7 +75,7 @@ var sparkVersions = []string{
"14.1.x-scala2.12",
}
func TestAccRunPythonTaskWorkspace(t *testing.T) {
func TestRunPythonTaskWorkspace(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
@ -96,7 +96,7 @@ func TestAccRunPythonTaskWorkspace(t *testing.T) {
})
}
func TestAccRunPythonTaskDBFS(t *testing.T) {
func TestRunPythonTaskDBFS(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
@ -109,7 +109,7 @@ func TestAccRunPythonTaskDBFS(t *testing.T) {
})
}
func TestAccRunPythonTaskRepo(t *testing.T) {
func TestRunPythonTaskRepo(t *testing.T) {
// TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly
testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST")
testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")