Move test helpers from internal to `acc` and `testutil` (#2008)

## Changes

This change moves the fixture helpers from the `internal` package to
`internal/acc/fixtures.go`. These helpers (`TemporaryWorkspaceDir`,
`TemporaryDbfsDir`, `TemporaryRepo`, and `TemporaryUcVolume`, renamed to
`TemporaryVolume`) create an ephemeral path or resource for the duration
of a test. Call sites are updated to use `acc.WorkspaceTest()` to
construct a workspace-focused test wrapper as needed.

This change also moves the node type ID lookup to `testutil`: the `GetNodeTypeId()` function becomes the `NodeTypeID()` method on the `Cloud` type.
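
For illustration, a minimal sketch of the resulting call pattern (the test name and body below are hypothetical; the helpers are the ones added in this change):

```go
package example_test

import (
	"testing"

	"github.com/databricks/cli/internal/acc"
	"github.com/databricks/cli/internal/testutil"
)

// Hypothetical example; not part of this change.
func TestExample(t *testing.T) {
	// Construct the workspace-focused test wrapper; wt.W is the SDK client.
	ctx, wt := acc.WorkspaceTest(t)

	// Fixtures now take the *acc.WorkspaceT wrapper and register their own
	// cleanup, instead of taking a raw *databricks.WorkspaceClient.
	wsDir := acc.TemporaryWorkspaceDir(wt, "example-")

	// The node type lookup is now a method on testutil.Cloud.
	nodeTypeId := testutil.GetCloud(t).NodeTypeID()

	_, _, _ = ctx, wsDir, nodeTypeId
}
```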

## Tests

n/a
Pieter Noordhuis committed on 2024-12-12 22:28:04 +01:00 (via GitHub)
parent e472b5d888
commit 61b0c59137
26 changed files with 309 additions and 380 deletions

internal/acc/fixtures.go (new file)

@@ -0,0 +1,133 @@
package acc
import (
"fmt"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/files"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/require"
)
func TemporaryWorkspaceDir(t *WorkspaceT, name ...string) string {
ctx := t.ctx
me, err := t.W.CurrentUser.Me(ctx)
require.NoError(t, err)
// Prefix the name with "integration-test-" to make it easier to identify.
name = append([]string{"integration-test-"}, name...)
basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName(name...))
t.Logf("Creating workspace directory %s", basePath)
err = t.W.Workspace.MkdirsByPath(ctx, basePath)
require.NoError(t, err)
// Remove test directory on test completion.
t.Cleanup(func() {
t.Logf("Removing workspace directory %s", basePath)
err := t.W.Workspace.Delete(ctx, workspace.Delete{
Path: basePath,
Recursive: true,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("Unable to remove temporary workspace directory %s: %#v", basePath, err)
})
return basePath
}
func TemporaryDbfsDir(t *WorkspaceT, name ...string) string {
ctx := t.ctx
// Prefix the name with "integration-test-" to make it easier to identify.
name = append([]string{"integration-test-"}, name...)
path := fmt.Sprintf("/tmp/%s", testutil.RandomName(name...))
t.Logf("Creating DBFS directory %s", path)
err := t.W.Dbfs.MkdirsByPath(ctx, path)
require.NoError(t, err)
t.Cleanup(func() {
t.Logf("Removing DBFS directory %s", path)
err := t.W.Dbfs.Delete(ctx, files.Delete{
Path: path,
Recursive: true,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("Unable to remove temporary DBFS directory %s: %#v", path, err)
})
return path
}
func TemporaryRepo(t *WorkspaceT, url string) string {
ctx := t.ctx
me, err := t.W.CurrentUser.Me(ctx)
require.NoError(t, err)
// Prefix the path with "integration-test-" to make it easier to identify.
path := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("integration-test-"))
t.Logf("Creating repo: %s", path)
resp, err := t.W.Repos.Create(ctx, workspace.CreateRepoRequest{
Url: url,
Path: path,
Provider: "gitHub",
})
require.NoError(t, err)
t.Cleanup(func() {
t.Logf("Removing repo: %s", path)
err := t.W.Repos.Delete(ctx, workspace.DeleteRepoRequest{
RepoId: resp.Id,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("Unable to remove repo %s: %#v", path, err)
})
return path
}
// Create a new Unity Catalog volume in a catalog called "main" in the workspace.
func TemporaryVolume(t *WorkspaceT) string {
ctx := t.ctx
w := t.W
// Create a schema
schema, err := w.Schemas.Create(ctx, catalog.CreateSchema{
CatalogName: "main",
Name: testutil.RandomName("test-schema-"),
})
require.NoError(t, err)
t.Cleanup(func() {
err := w.Schemas.Delete(ctx, catalog.DeleteSchemaRequest{
FullName: schema.FullName,
})
require.NoError(t, err)
})
// Create a volume
volume, err := w.Volumes.Create(ctx, catalog.CreateVolumeRequestContent{
CatalogName: "main",
SchemaName: schema.Name,
Name: "my-volume",
VolumeType: catalog.VolumeTypeManaged,
})
require.NoError(t, err)
t.Cleanup(func() {
err := w.Volumes.Delete(ctx, catalog.DeleteVolumeRequest{
Name: volume.FullName,
})
require.NoError(t, err)
})
return fmt.Sprintf("/Volumes/%s/%s/%s", "main", schema.Name, volume.Name)
}
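
For reference, a short usage sketch of these fixtures (hypothetical test; `TemporaryVolume` assumes a Unity Catalog metastore with a `main` catalog, which the helper above hardcodes):

```go
package example_test

import (
	"testing"

	"github.com/databricks/cli/internal/acc"
)

// Hypothetical example; not part of this change.
func TestFixtures(t *testing.T) {
	_, wt := acc.WorkspaceTest(t)

	// Each helper provisions an ephemeral resource and removes it on test
	// completion via the registered cleanup.
	dbfsDir := acc.TemporaryDbfsDir(wt, "demo-")
	repoPath := acc.TemporaryRepo(wt, "https://github.com/databricks/cli")
	volumePath := acc.TemporaryVolume(wt)

	t.Logf("dbfs=%s repo=%s volume=%s", dbfsDir, repoPath, volumePath)
}
```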

---

@@ -2,14 +2,11 @@ package acc
import (
"context"
"fmt"
"os"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/require"
)
@@ -97,30 +94,3 @@ func (t *WorkspaceT) RunPython(code string) (string, error) {
require.True(t, ok, "unexpected type %T", results.Data)
return output, nil
}
func (t *WorkspaceT) TemporaryWorkspaceDir(name ...string) string {
ctx := context.Background()
me, err := t.W.CurrentUser.Me(ctx)
require.NoError(t, err)
basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName(name...))
t.Logf("Creating %s", basePath)
err = t.W.Workspace.MkdirsByPath(ctx, basePath)
require.NoError(t, err)
// Remove test directory on test completion.
t.Cleanup(func() {
t.Logf("Removing %s", basePath)
err := t.W.Workspace.Delete(ctx, workspace.Delete{
Path: basePath,
Recursive: true,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("Unable to remove temporary workspace directory %s: %#v", basePath, err)
})
return basePath
}

---

@@ -12,7 +12,6 @@ import (
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
@@ -34,12 +33,11 @@ func touchEmptyFile(t *testing.T, path string) {
func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
dir := t.TempDir()
whlPath := filepath.Join(dir, "dist", "test.whl")
touchEmptyFile(t, whlPath)
wsDir := internal.TemporaryWorkspaceDir(t, w)
wsDir := acc.TemporaryWorkspaceDir(wt, "artifact-")
b := &bundle.Bundle{
BundleRootPath: dir,
@@ -99,12 +97,11 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
dir := t.TempDir()
whlPath := filepath.Join(dir, "dist", "test.whl")
touchEmptyFile(t, whlPath)
wsDir := internal.TemporaryWorkspaceDir(t, w)
wsDir := acc.TemporaryWorkspaceDir(wt, "artifact-")
b := &bundle.Bundle{
BundleRootPath: dir,
@@ -164,13 +161,12 @@ func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T)
func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
if os.Getenv("TEST_METASTORE_ID") == "" {
t.Skip("Skipping tests that require a UC Volume when metastore id is not set.")
}
volumePath := internal.TemporaryUcVolume(t, w)
volumePath := acc.TemporaryVolume(wt)
dir := t.TempDir()
whlPath := filepath.Join(dir, "dist", "test.whl")

---

@@ -5,9 +5,8 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/internal/testutil"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)
@@ -15,7 +14,7 @@ import (
func TestAccBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {
ctx, _ := acc.WorkspaceTest(t)
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root, err := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,

---

@@ -6,7 +6,6 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
@@ -22,9 +21,9 @@ func TestAccBindJobToExistingJob(t *testing.T) {
t.Log(env)
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: t, w: wt.W}
gt := &generateJobTest{T: wt, w: wt.W}
nodeTypeId := internal.GetNodeTypeId(env)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,
@@ -87,9 +86,9 @@ func TestAccAbortBind(t *testing.T) {
t.Log(env)
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: t, w: wt.W}
gt := &generateJobTest{T: wt, w: wt.W}
nodeTypeId := internal.GetNodeTypeId(env)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,
@@ -136,7 +135,7 @@ func TestAccGenerateAndBind(t *testing.T) {
t.Log(env)
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: t, w: wt.W}
gt := &generateJobTest{T: wt, w: wt.W}
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "with_includes", map[string]any{

---

@@ -4,10 +4,8 @@ import (
"fmt"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
@@ -20,7 +18,7 @@ func TestAccDeployBundleWithCluster(t *testing.T) {
t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
}
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root, err := initTestTemplate(t, ctx, "clusters", map[string]any{
"unique_id": uniqueId,

---

@@ -11,10 +11,9 @@ import (
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/catalog"
@@ -133,7 +132,7 @@ func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
"unique_id": uniqueId,
@@ -219,7 +218,7 @@ properties such as the 'catalog' or 'storage' are changed:
func TestAccDeployBasicBundleLogs(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root, err := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,

---

@@ -5,9 +5,8 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/internal/testutil"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -17,7 +16,7 @@ func TestAccBundleDeployThenRemoveResources(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
"unique_id": uniqueId,

---

@@ -4,9 +4,8 @@ import (
"fmt"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/internal/testutil"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)
@@ -14,7 +13,7 @@ import (
func TestAccDeployBasicToSharedWorkspacePath(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
currentUser, err := wt.W.CurrentUser.Me(ctx)

---

@@ -7,7 +7,6 @@ import (
"testing"
"github.com/databricks/cli/bundle/deploy"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/google/uuid"
@@ -21,7 +20,7 @@ func TestAccFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
nodeTypeId := internal.GetNodeTypeId(env)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,

---

@@ -6,9 +6,8 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
@@ -19,7 +18,7 @@ func TestAccBundleDestroy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
"unique_id": uniqueId,

---

@@ -9,7 +9,6 @@ import (
"strings"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
@@ -23,7 +22,7 @@ import (
func TestAccGenerateFromExistingJobAndDeploy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
gt := &generateJobTest{T: t, w: wt.W}
gt := &generateJobTest{T: wt, w: wt.W}
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "with_includes", map[string]any{
@@ -70,7 +69,7 @@ func TestAccGenerateFromExistingJobAndDeploy(t *testing.T) {
}
type generateJobTest struct {
T *testing.T
T *acc.WorkspaceT
w *databricks.WorkspaceClient
}
@@ -78,17 +77,7 @@ func (gt *generateJobTest) createTestJob(ctx context.Context) int64 {
t := gt.T
w := gt.w
var nodeTypeId string
switch testutil.GetCloud(t) {
case testutil.AWS:
nodeTypeId = "i3.xlarge"
case testutil.Azure:
nodeTypeId = "Standard_DS4_v2"
case testutil.GCP:
nodeTypeId = "n1-standard-4"
}
tmpdir := internal.TemporaryWorkspaceDir(t, w)
tmpdir := acc.TemporaryWorkspaceDir(t, "generate-job-")
f, err := filer.NewWorkspaceFilesClient(w, tmpdir)
require.NoError(t, err)
@@ -103,7 +92,7 @@ func (gt *generateJobTest) createTestJob(ctx context.Context) int64 {
NewCluster: &compute.ClusterSpec{
SparkVersion: "13.3.x-scala2.12",
NumWorkers: 1,
NodeTypeId: nodeTypeId,
NodeTypeId: testutil.GetCloud(t).NodeTypeID(),
SparkConf: map[string]string{
"spark.databricks.enableWsfs": "true",
"spark.databricks.hive.metastore.glueCatalog.enabled": "true",

---

@@ -9,7 +9,6 @@ import (
"strings"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
@@ -22,7 +21,7 @@ import (
func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
gt := &generatePipelineTest{T: t, w: wt.W}
gt := &generatePipelineTest{T: wt, w: wt.W}
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "with_includes", map[string]any{
@@ -78,7 +77,7 @@ func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) {
}
type generatePipelineTest struct {
T *testing.T
T *acc.WorkspaceT
w *databricks.WorkspaceClient
}
@@ -86,7 +85,7 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string,
t := gt.T
w := gt.w
tmpdir := internal.TemporaryWorkspaceDir(t, w)
tmpdir := acc.TemporaryWorkspaceDir(t, "generate-pipeline-")
f, err := filer.NewWorkspaceFilesClient(w, tmpdir)
require.NoError(t, err)
@@ -96,8 +95,7 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string,
err = f.Write(ctx, "test.py", strings.NewReader("print('Hello!')"))
require.NoError(t, err)
env := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
nodeTypeId := internal.GetNodeTypeId(env)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
name := testutil.RandomName("generated-pipeline-")
resp, err := w.Pipelines.Create(ctx, pipelines.CreatePipeline{

---

@@ -10,9 +10,8 @@ import (
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/metadata"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
@@ -23,7 +22,7 @@ func TestAccJobsMetadataFile(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "job_metadata", map[string]any{
"unique_id": uniqueId,

---

@@ -4,9 +4,8 @@ import (
"context"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/listing"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/google/uuid"
@@ -25,7 +24,7 @@ func TestAccLocalStateStaleness(t *testing.T) {
// Because of deploy (2), the locally cached state of bundle instance A should be stale.
// Then for deploy (3), it must use the remote state over the stale local state.
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
initialize := func() string {
root, err := initTestTemplate(t, ctx, "basic", map[string]any{

---

@@ -3,7 +3,6 @@ package bundle
import (
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
@@ -14,7 +13,7 @@ import (
func runPythonWheelTest(t *testing.T, templateName, sparkVersion string, pythonWheelWrapper bool) {
ctx, _ := acc.WorkspaceTest(t)
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")
bundleRoot, err := initTestTemplate(t, ctx, templateName, map[string]any{
"node_type_id": nodeTypeId,

---

@@ -4,7 +4,6 @@ import (
"context"
"testing"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
@@ -13,8 +12,7 @@ import (
)
func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, artifactPath string) {
cloudEnv := testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")
nodeTypeId := internal.GetNodeTypeId(cloudEnv)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
tmpDir := t.TempDir()
instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")
bundleRoot, err := initTestTemplateWithBundleRoot(t, ctx, "spark_jar_task", map[string]any{
@@ -42,7 +40,7 @@ func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, arti
func runSparkJarTestFromVolume(t *testing.T, sparkVersion string) {
ctx, wt := acc.UcWorkspaceTest(t)
volumePath := internal.TemporaryUcVolume(t, wt.W)
volumePath := acc.TemporaryVolume(wt)
ctx = env.Set(ctx, "DATABRICKS_BUNDLE_TARGET", "volume")
runSparkJarTestCommon(t, ctx, sparkVersion, volumePath)
}

---

@@ -28,7 +28,7 @@ func TestAccDashboardAssumptions_WorkspaceImport(t *testing.T) {
dashboardPayload := []byte(`{"pages":[{"name":"2506f97a","displayName":"New Page"}]}`)
warehouseId := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
dir := wt.TemporaryWorkspaceDir("dashboard-assumptions-")
dir := acc.TemporaryWorkspaceDir(wt, "dashboard-assumptions-")
dashboard, err := wt.W.Lakeview.Create(ctx, dashboards.CreateDashboardRequest{
Dashboard: &dashboards.Dashboard{

---

@@ -7,10 +7,10 @@ import (
"strings"
"testing"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -79,14 +79,10 @@ func TestAccFsCatForDbfsInvalidScheme(t *testing.T) {
}
func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
tmpDir := TemporaryDbfsDir(t, w)
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
tmpDir := acc.TemporaryDbfsDir(wt, "fs-cat-")
f, err := filer.NewDbfsClient(w, tmpDir)
require.NoError(t, err)

---

@@ -1,220 +1,20 @@
package internal
import (
"context"
"errors"
"fmt"
"net/http"
"os"
"path"
"path/filepath"
"strings"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/files"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/require"
)
func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
tasks := make([]jobs.SubmitTask, 0)
for i := 0; i < len(versions); i++ {
task := jobs.SubmitTask{
TaskKey: fmt.Sprintf("notebook_%s", strings.ReplaceAll(versions[i], ".", "_")),
NotebookTask: &jobs.NotebookTask{
NotebookPath: notebookPath,
},
NewCluster: &compute.ClusterSpec{
SparkVersion: versions[i],
NumWorkers: 1,
NodeTypeId: nodeTypeId,
DataSecurityMode: compute.DataSecurityModeUserIsolation,
},
}
tasks = append(tasks, task)
}
return tasks
}
func GenerateSparkPythonTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
tasks := make([]jobs.SubmitTask, 0)
for i := 0; i < len(versions); i++ {
task := jobs.SubmitTask{
TaskKey: fmt.Sprintf("spark_%s", strings.ReplaceAll(versions[i], ".", "_")),
SparkPythonTask: &jobs.SparkPythonTask{
PythonFile: notebookPath,
},
NewCluster: &compute.ClusterSpec{
SparkVersion: versions[i],
NumWorkers: 1,
NodeTypeId: nodeTypeId,
DataSecurityMode: compute.DataSecurityModeUserIsolation,
},
}
tasks = append(tasks, task)
}
return tasks
}
func GenerateWheelTasks(wheelPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
tasks := make([]jobs.SubmitTask, 0)
for i := 0; i < len(versions); i++ {
task := jobs.SubmitTask{
TaskKey: fmt.Sprintf("whl_%s", strings.ReplaceAll(versions[i], ".", "_")),
PythonWheelTask: &jobs.PythonWheelTask{
PackageName: "my_test_code",
EntryPoint: "run",
},
NewCluster: &compute.ClusterSpec{
SparkVersion: versions[i],
NumWorkers: 1,
NodeTypeId: nodeTypeId,
DataSecurityMode: compute.DataSecurityModeUserIsolation,
},
Libraries: []compute.Library{
{Whl: wheelPath},
},
}
tasks = append(tasks, task)
}
return tasks
}
func TemporaryWorkspaceDir(t testutil.TestingT, w *databricks.WorkspaceClient) string {
ctx := context.Background()
me, err := w.CurrentUser.Me(ctx)
require.NoError(t, err)
basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName("integration-test-wsfs-"))
t.Logf("Creating %s", basePath)
err = w.Workspace.MkdirsByPath(ctx, basePath)
require.NoError(t, err)
// Remove test directory on test completion.
t.Cleanup(func() {
t.Logf("Removing %s", basePath)
err := w.Workspace.Delete(ctx, workspace.Delete{
Path: basePath,
Recursive: true,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("Unable to remove temporary workspace directory %s: %#v", basePath, err)
})
return basePath
}
func TemporaryDbfsDir(t testutil.TestingT, w *databricks.WorkspaceClient) string {
ctx := context.Background()
path := fmt.Sprintf("/tmp/%s", testutil.RandomName("integration-test-dbfs-"))
t.Logf("Creating DBFS folder:%s", path)
err := w.Dbfs.MkdirsByPath(ctx, path)
require.NoError(t, err)
t.Cleanup(func() {
t.Logf("Removing DBFS folder:%s", path)
err := w.Dbfs.Delete(ctx, files.Delete{
Path: path,
Recursive: true,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("unable to remove temporary dbfs directory %s: %#v", path, err)
})
return path
}
// Create a new UC volume in a catalog called "main" in the workspace.
func TemporaryUcVolume(t testutil.TestingT, w *databricks.WorkspaceClient) string {
ctx := context.Background()
// Create a schema
schema, err := w.Schemas.Create(ctx, catalog.CreateSchema{
CatalogName: "main",
Name: testutil.RandomName("test-schema-"),
})
require.NoError(t, err)
t.Cleanup(func() {
err := w.Schemas.Delete(ctx, catalog.DeleteSchemaRequest{
FullName: schema.FullName,
})
require.NoError(t, err)
})
// Create a volume
volume, err := w.Volumes.Create(ctx, catalog.CreateVolumeRequestContent{
CatalogName: "main",
SchemaName: schema.Name,
Name: "my-volume",
VolumeType: catalog.VolumeTypeManaged,
})
require.NoError(t, err)
t.Cleanup(func() {
err := w.Volumes.Delete(ctx, catalog.DeleteVolumeRequest{
Name: volume.FullName,
})
require.NoError(t, err)
})
return path.Join("/Volumes", "main", schema.Name, volume.Name)
}
func TemporaryRepo(t testutil.TestingT, w *databricks.WorkspaceClient) string {
ctx := context.Background()
me, err := w.CurrentUser.Me(ctx)
require.NoError(t, err)
repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("integration-test-repo-"))
t.Logf("Creating repo:%s", repoPath)
repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{
Url: "https://github.com/databricks/cli",
Provider: "github",
Path: repoPath,
})
require.NoError(t, err)
t.Cleanup(func() {
t.Logf("Removing repo: %s", repoPath)
err := w.Repos.Delete(ctx, workspace.DeleteRepoRequest{
RepoId: repoInfo.Id,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("unable to remove repo %s: %#v", repoPath, err)
})
return repoPath
}
func GetNodeTypeId(env string) string {
if env == "gcp" {
return "n1-standard-4"
} else if env == "aws" || env == "ucws" {
// aws-prod-ucws has CLOUD_ENV set to "ucws"
return "i3.xlarge"
}
return "Standard_DS4_v2"
}
func setupLocalFiler(t testutil.TestingT) (filer.Filer, string) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
@@ -228,7 +28,7 @@ func setupLocalFiler(t testutil.TestingT) (filer.Filer, string) {
func setupWsfsFiler(t testutil.TestingT) (filer.Filer, string) {
ctx, wt := acc.WorkspaceTest(t)
tmpdir := TemporaryWorkspaceDir(t, wt.W)
tmpdir := acc.TemporaryWorkspaceDir(wt)
f, err := filer.NewWorkspaceFilesClient(wt.W, tmpdir)
require.NoError(t, err)
@@ -245,36 +45,31 @@ func setupWsfsFiler(t testutil.TestingT) (filer.Filer, string) {
func setupWsfsExtensionsFiler(t testutil.TestingT) (filer.Filer, string) {
_, wt := acc.WorkspaceTest(t)
tmpdir := TemporaryWorkspaceDir(t, wt.W)
tmpdir := acc.TemporaryWorkspaceDir(wt)
f, err := filer.NewWorkspaceFilesExtensionsClient(wt.W, tmpdir)
require.NoError(t, err)
return f, tmpdir
}
func setupDbfsFiler(t testutil.TestingT) (filer.Filer, string) {
_, wt := acc.WorkspaceTest(t)
tmpDir := TemporaryDbfsDir(t, wt.W)
f, err := filer.NewDbfsClient(wt.W, tmpDir)
tmpdir := acc.TemporaryDbfsDir(wt)
f, err := filer.NewDbfsClient(wt.W, tmpdir)
require.NoError(t, err)
return f, path.Join("dbfs:/", tmpDir)
return f, path.Join("dbfs:/", tmpdir)
}
func setupUcVolumesFiler(t testutil.TestingT) (filer.Filer, string) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, wt := acc.WorkspaceTest(t)
if os.Getenv("TEST_METASTORE_ID") == "" {
t.Skip("Skipping tests that require a UC Volume when metastore id is not set.")
}
w, err := databricks.NewWorkspaceClient()
tmpdir := acc.TemporaryVolume(wt)
f, err := filer.NewFilesClient(wt.W, tmpdir)
require.NoError(t, err)
tmpDir := TemporaryUcVolume(t, w)
f, err := filer.NewFilesClient(w, tmpDir)
require.NoError(t, err)
return f, path.Join("dbfs:/", tmpDir)
return f, path.Join("dbfs:/", tmpdir)
}

---

@@ -11,6 +11,7 @@ import (
"testing"
"time"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
lockpkg "github.com/databricks/cli/libs/locker"
@@ -164,14 +165,12 @@ func TestAccLock(t *testing.T) {
assert.True(t, lockers[indexOfAnInactiveLocker].Active)
}
func setupLockerTest(ctx context.Context, t *testing.T) (*lockpkg.Locker, filer.Filer) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
func setupLockerTest(t *testing.T) (context.Context, *lockpkg.Locker, filer.Filer) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
// create temp wsfs dir
tmpDir := TemporaryWorkspaceDir(t, w)
tmpDir := acc.TemporaryWorkspaceDir(wt, "locker-")
f, err := filer.NewWorkspaceFilesClient(w, tmpDir)
require.NoError(t, err)
@@ -179,12 +178,11 @@ func setupLockerTest(ctx context.Context, t *testing.T) (*lockpkg.Locker, filer.
locker, err := lockpkg.CreateLocker("redfoo@databricks.com", tmpDir, w)
require.NoError(t, err)
return locker, f
return ctx, locker, f
}
func TestAccLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) {
ctx := context.Background()
locker, f := setupLockerTest(ctx, t)
ctx, locker, f := setupLockerTest(t)
var err error
// Acquire lock on tmp directory
@@ -205,8 +203,7 @@ func TestAccLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) {
}
func TestAccLockUnlockWithAllowsLockFileNotExist(t *testing.T) {
ctx := context.Background()
locker, f := setupLockerTest(ctx, t)
ctx, locker, f := setupLockerTest(t)
var err error
// Acquire lock on tmp directory

---

@@ -14,10 +14,11 @@ import (
"time"
"github.com/databricks/cli/bundle/run/output"
"github.com/databricks/cli/internal"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/require"
@@ -127,14 +128,14 @@ func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) {
w := tw.w
nodeTypeId := internal.GetNodeTypeId(env)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
tasks := make([]jobs.SubmitTask, 0)
if opts.includeNotebookTasks {
tasks = append(tasks, internal.GenerateNotebookTasks(tw.pyNotebookPath, sparkVersions, nodeTypeId)...)
tasks = append(tasks, GenerateNotebookTasks(tw.pyNotebookPath, sparkVersions, nodeTypeId)...)
}
if opts.includeSparkPythonTasks {
tasks = append(tasks, internal.GenerateSparkPythonTasks(tw.sparkPythonPath, sparkVersions, nodeTypeId)...)
tasks = append(tasks, GenerateSparkPythonTasks(tw.sparkPythonPath, sparkVersions, nodeTypeId)...)
}
if opts.includeWheelTasks {
@@ -142,7 +143,7 @@ func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) {
if len(opts.wheelSparkVersions) > 0 {
versions = opts.wheelSparkVersions
}
tasks = append(tasks, internal.GenerateWheelTasks(tw.wheelPath, versions, nodeTypeId)...)
tasks = append(tasks, GenerateWheelTasks(tw.wheelPath, versions, nodeTypeId)...)
}
ctx := context.Background()
@@ -179,13 +180,13 @@ func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) {
}
func prepareWorkspaceFiles(t *testing.T) *testFiles {
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
var err error
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
baseDir := acc.TemporaryWorkspaceDir(wt, "python-tasks-")
baseDir := internal.TemporaryWorkspaceDir(t, w)
pyNotebookPath := path.Join(baseDir, "test.py")
err = w.Workspace.Import(ctx, workspace.Import{
Path: pyNotebookPath,
Overwrite: true,
@@ -225,11 +226,12 @@ func prepareWorkspaceFiles(t *testing.T) *testFiles {
}
func prepareDBFSFiles(t *testing.T) *testFiles {
ctx := context.Background()
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
var err error
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
baseDir := acc.TemporaryDbfsDir(wt, "python-tasks-")
baseDir := internal.TemporaryDbfsDir(t, w)
f, err := filer.NewDbfsClient(w, baseDir)
require.NoError(t, err)
@@ -254,15 +256,83 @@ func prepareDBFSFiles(t *testing.T) *testFiles {
}
func prepareRepoFiles(t *testing.T) *testFiles {
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
_, wt := acc.WorkspaceTest(t)
w := wt.W
baseDir := acc.TemporaryRepo(wt, "https://github.com/databricks/cli")
repo := internal.TemporaryRepo(t, w)
packagePath := "internal/python/testdata"
return &testFiles{
w: w,
pyNotebookPath: path.Join(repo, packagePath, "test"),
sparkPythonPath: path.Join(repo, packagePath, "spark.py"),
wheelPath: path.Join(repo, packagePath, "my_test_code-0.0.1-py3-none-any.whl"),
pyNotebookPath: path.Join(baseDir, packagePath, "test"),
sparkPythonPath: path.Join(baseDir, packagePath, "spark.py"),
wheelPath: path.Join(baseDir, packagePath, "my_test_code-0.0.1-py3-none-any.whl"),
}
}
func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
tasks := make([]jobs.SubmitTask, 0)
for i := 0; i < len(versions); i++ {
task := jobs.SubmitTask{
TaskKey: fmt.Sprintf("notebook_%s", strings.ReplaceAll(versions[i], ".", "_")),
NotebookTask: &jobs.NotebookTask{
NotebookPath: notebookPath,
},
NewCluster: &compute.ClusterSpec{
SparkVersion: versions[i],
NumWorkers: 1,
NodeTypeId: nodeTypeId,
DataSecurityMode: compute.DataSecurityModeUserIsolation,
},
}
tasks = append(tasks, task)
}
return tasks
}
func GenerateSparkPythonTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
tasks := make([]jobs.SubmitTask, 0)
for i := 0; i < len(versions); i++ {
task := jobs.SubmitTask{
TaskKey: fmt.Sprintf("spark_%s", strings.ReplaceAll(versions[i], ".", "_")),
SparkPythonTask: &jobs.SparkPythonTask{
PythonFile: notebookPath,
},
NewCluster: &compute.ClusterSpec{
SparkVersion: versions[i],
NumWorkers: 1,
NodeTypeId: nodeTypeId,
DataSecurityMode: compute.DataSecurityModeUserIsolation,
},
}
tasks = append(tasks, task)
}
return tasks
}
func GenerateWheelTasks(wheelPath string, versions []string, nodeTypeId string) []jobs.SubmitTask {
tasks := make([]jobs.SubmitTask, 0)
for i := 0; i < len(versions); i++ {
task := jobs.SubmitTask{
TaskKey: fmt.Sprintf("whl_%s", strings.ReplaceAll(versions[i], ".", "_")),
PythonWheelTask: &jobs.PythonWheelTask{
PackageName: "my_test_code",
EntryPoint: "run",
},
NewCluster: &compute.ClusterSpec{
SparkVersion: versions[i],
NumWorkers: 1,
NodeTypeId: nodeTypeId,
DataSecurityMode: compute.DataSecurityModeUserIsolation,
},
Libraries: []compute.Library{
{Whl: wheelPath},
},
}
tasks = append(tasks, task)
}
return tasks
}

---

@@ -15,6 +15,7 @@ import (
"testing"
"time"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
@@ -72,11 +73,11 @@ type syncTest struct {
}
func setupSyncTest(t *testing.T, args ...string) *syncTest {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
_, wt := acc.WorkspaceTest(t)
w := wt.W
w := databricks.Must(databricks.NewWorkspaceClient())
localRoot := t.TempDir()
remoteRoot := TemporaryWorkspaceDir(t, w)
remoteRoot := acc.TemporaryWorkspaceDir(wt, "sync-")
f, err := filer.NewWorkspaceFilesClient(w, remoteRoot)
require.NoError(t, err)

---

@@ -1,33 +1,19 @@
package internal
import (
"context"
"strings"
"testing"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/require"
)
func testTags(t *testing.T, tags map[string]string) error {
var nodeTypeId string
switch testutil.GetCloud(t) {
case testutil.AWS:
nodeTypeId = "i3.xlarge"
case testutil.Azure:
nodeTypeId = "Standard_DS4_v2"
case testutil.GCP:
nodeTypeId = "n1-standard-4"
}
w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
ctx := context.Background()
resp, err := w.Jobs.Create(ctx, jobs.CreateJob{
ctx, wt := acc.WorkspaceTest(t)
resp, err := wt.W.Jobs.Create(ctx, jobs.CreateJob{
Name: testutil.RandomName("test-tags-"),
Tasks: []jobs.Task{
{
@@ -35,7 +21,7 @@ func testTags(t *testing.T, tags map[string]string) error {
NewCluster: &compute.ClusterSpec{
SparkVersion: "13.3.x-scala2.12",
NumWorkers: 1,
NodeTypeId: nodeTypeId,
NodeTypeId: testutil.GetCloud(t).NodeTypeID(),
},
SparkPythonTask: &jobs.SparkPythonTask{
PythonFile: "/doesnt_exist.py",
@@ -47,7 +33,7 @@ func testTags(t *testing.T, tags map[string]string) error {
if resp != nil {
t.Cleanup(func() {
_ = w.Jobs.DeleteByJobId(ctx, resp.JobId)
_ = wt.W.Jobs.DeleteByJobId(ctx, resp.JobId)
// Cannot enable errchecking there, tests fail with:
// Error: Received unexpected error:
// Job 0 does not exist.

---

@@ -28,6 +28,19 @@ func (c Cloud) String() string {
}
}
func (c Cloud) NodeTypeID() string {
switch c {
case AWS:
return "i3.xlarge"
case Azure:
return "Standard_DS4_v2"
case GCP:
return "n1-standard-4"
default:
return "unknown"
}
}
func GetCloud(t TestingT) Cloud {
env := GetEnvOrSkipTest(t, "CLOUD_ENV")
switch env {

---

@@ -15,7 +15,6 @@ import (
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -44,11 +43,10 @@ func TestWorkpaceGetStatusErrorWhenNoArguments(t *testing.T) {
}
func TestAccWorkpaceExportPrintsContents(t *testing.T) {
t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV"))
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
ctx := context.Background()
w := databricks.Must(databricks.NewWorkspaceClient())
tmpdir := TemporaryWorkspaceDir(t, w)
tmpdir := acc.TemporaryWorkspaceDir(wt, "workspace-export-")
f, err := filer.NewWorkspaceFilesClient(w, tmpdir)
require.NoError(t, err)
@@ -65,9 +63,10 @@ func TestAccWorkpaceExportPrintsContents(t *testing.T) {
func setupWorkspaceImportExportTest(t *testing.T) (context.Context, filer.Filer, string) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
tmpdir := TemporaryWorkspaceDir(t, wt.W)
f, err := filer.NewWorkspaceFilesClient(wt.W, tmpdir)
tmpdir := acc.TemporaryWorkspaceDir(wt, "workspace-import-")
f, err := filer.NewWorkspaceFilesClient(w, tmpdir)
require.NoError(t, err)
return ctx, f, tmpdir