From a4879c0d91921554c482a23360aa20c31dafdeac Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Mon, 5 Jun 2023 01:06:42 +0200
Subject: [PATCH] Add integration tests for fs ls

---
 cmd/fs/ls.go           |  4 ++++
 internal/filer_test.go |  2 +-
 internal/ls_test.go    | 79 ++++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 84 insertions(+), 1 deletion(-)

diff --git a/cmd/fs/ls.go b/cmd/fs/ls.go
index d8626f299..828610d99 100644
--- a/cmd/fs/ls.go
+++ b/cmd/fs/ls.go
@@ -3,6 +3,7 @@ package fs
 import (
 	"fmt"
 	"net/url"
+	"sort"
 
 	"github.com/databricks/cli/cmd/root"
 	"github.com/databricks/cli/libs/cmdio"
@@ -59,6 +60,9 @@ var lsCmd = &cobra.Command{
 				return err
 			}
 			lsOutputs = append(lsOutputs, *parsedEntry)
 		}
+		sort.Slice(lsOutputs, func(i, j int) bool {
+			return lsOutputs[i].Name < lsOutputs[j].Name
+		})
 
 		// Use template for long mode if the flag is set
diff --git a/internal/filer_test.go b/internal/filer_test.go
index 5037f7840..81c3e4aea 100644
--- a/internal/filer_test.go
+++ b/internal/filer_test.go
@@ -241,7 +241,7 @@ func TestAccFilerWorkspaceFilesReadDir(t *testing.T) {
 func temporaryDbfsDir(t *testing.T, w *databricks.WorkspaceClient) string {
 	ctx := context.Background()
 
-	path := fmt.Sprintf("/tmp/%s", RandomName("integration-test-filer-dbfs-"))
+	path := fmt.Sprintf("/tmp/%s", RandomName("integration-test-dbfs-"))
 
 	// This call fails if the path already exists.
 	t.Logf("mkdir dbfs:%s", path)
diff --git a/internal/ls_test.go b/internal/ls_test.go
index 8807a5d62..1c716bf01 100644
--- a/internal/ls_test.go
+++ b/internal/ls_test.go
@@ -1,2 +1,81 @@
 package internal
 
+import (
+	"context"
+	"encoding/json"
+	"io/fs"
+	"strings"
+	"testing"
+
+	_ "github.com/databricks/cli/cmd/fs"
+	"github.com/databricks/cli/libs/filer"
+	"github.com/databricks/databricks-sdk-go"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestFsLsForDbfs(t *testing.T) {
+	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+
+	ctx := context.Background()
+	w, err := databricks.NewWorkspaceClient()
+	require.NoError(t, err)
+
+	tmpDir := temporaryDbfsDir(t, w)
+
+	f, err := filer.NewDbfsClient(w, tmpDir)
+	require.NoError(t, err)
+
+	err = f.Mkdir(ctx, "a")
+	require.NoError(t, err)
+	err = f.Write(ctx, "a/hello.txt", strings.NewReader("abc"), filer.CreateParentDirectories)
+	require.NoError(t, err)
+	err = f.Write(ctx, "bye.txt", strings.NewReader("def"))
+	require.NoError(t, err)
+
+	stdout, stderr := RequireSuccessfulRun(t, "fs", "ls", "dbfs:"+tmpDir, "--output=json")
+	assert.Equal(t, "", stderr.String())
+	var parsedStdout []map[string]any
+	err = json.Unmarshal(stdout.Bytes(), &parsedStdout)
+	require.NoError(t, err)
+
+	// assert on ls output
+	assert.Equal(t, "a", parsedStdout[0]["name"])
+	assert.Equal(t, true, parsedStdout[0]["is_directory"])
+	assert.Equal(t, float64(0), parsedStdout[0]["size"])
+	assert.Equal(t, "bye.txt", parsedStdout[1]["name"])
+	assert.Equal(t, false, parsedStdout[1]["is_directory"])
+	assert.Equal(t, float64(3), parsedStdout[1]["size"])
+}
+
+func TestFsLsForDbfsOnEmptyDir(t *testing.T) {
+	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+
+	w, err := databricks.NewWorkspaceClient()
+	require.NoError(t, err)
+
+	tmpDir := temporaryDbfsDir(t, w)
+
+	stdout, stderr := RequireSuccessfulRun(t, "fs", "ls", "dbfs:"+tmpDir, "--output=json")
+	assert.Equal(t, "", stderr.String())
+	var parsedStdout []map[string]any
+	err = json.Unmarshal(stdout.Bytes(), &parsedStdout)
+	require.NoError(t, err)
+
+	// assert on ls output
+	assert.Equal(t, 0, len(parsedStdout))
+}
+
+func TestFsLsForDbfsForNonexistingDir(t *testing.T) {
+	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+
+	_, _, err := RequireErrorRun(t, "fs", "ls", "dbfs:/john-cena", "--output=json")
+	assert.ErrorIs(t, err, fs.ErrNotExist)
+}
+
+func TestFsLsWithoutScheme(t *testing.T) {
+	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+
+	_, _, err := RequireErrorRun(t, "fs", "ls", "/ray-mysterio", "--output=json")
+	assert.ErrorContains(t, err, "expected dbfs path (with the dbfs:/ prefix): /ray-mysterio")
+}
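
Note (editorial aside, not part of the patch): the tests above compare sizes
against float64 values because encoding/json decodes JSON numbers into float64
when unmarshaling into map[string]any. The standalone sketch below illustrates
the output shape the tests expect, applying the same name-based ordering the
patch adds to lsCmd. The jsonDirEntry struct here is a hypothetical stand-in;
the CLI's actual output type lives in cmd/fs/ls.go and may differ.

package main

import (
	"encoding/json"
	"fmt"
	"sort"
)

// jsonDirEntry is an illustrative stand-in for the CLI's ls output type;
// the field tags match the JSON keys the tests assert on.
type jsonDirEntry struct {
	Name        string `json:"name"`
	IsDirectory bool   `json:"is_directory"`
	Size        int64  `json:"size"`
}

func main() {
	// Entries as they might come back from a directory listing, unordered.
	entries := []jsonDirEntry{
		{Name: "bye.txt", IsDirectory: false, Size: 3},
		{Name: "a", IsDirectory: true, Size: 0},
	}

	// The same ordering rule the patch adds to lsCmd: sort entries by name.
	sort.Slice(entries, func(i, j int) bool {
		return entries[i].Name < entries[j].Name
	})

	out, err := json.MarshalIndent(entries, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// Prints the directory "a" first, then "bye.txt", matching the
	// index-based assertions in TestFsLsForDbfs.
}

sort.Slice is not a stable sort, but names within a directory are unique, so
the resulting order is deterministic.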