package internal
import (
|
|
|
|
"context"
|
|
|
|
"io/fs"
|
|
|
|
"path"
|
|
|
|
"strings"
|
|
|
|
"testing"
|
|
|
|
|
|
|
|
"github.com/databricks/cli/libs/filer"
|
|
|
|
"github.com/databricks/databricks-sdk-go"
|
|
|
|
"github.com/stretchr/testify/assert"
|
|
|
|
"github.com/stretchr/testify/require"
|
|
|
|
)
func TestAccFsCatForDbfs(t *testing.T) {
|
2023-06-05 23:16:23 +00:00
|
|
|
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
|
|
|
|
|
|
|
|
ctx := context.Background()
|
|
|
|
w, err := databricks.NewWorkspaceClient()
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
2023-10-03 11:18:55 +00:00
|
|
|
tmpDir := TemporaryDbfsDir(t, w)
|
2023-06-05 23:16:23 +00:00
|
|
|
|
|
|
|
f, err := filer.NewDbfsClient(w, tmpDir)
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
err = f.Write(ctx, "a/hello.txt", strings.NewReader("abc"), filer.CreateParentDirectories)
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
stdout, stderr := RequireSuccessfulRun(t, "fs", "cat", "dbfs:"+path.Join(tmpDir, "a", "hello.txt"))
|
|
|
|
assert.Equal(t, "", stderr.String())
|
|
|
|
assert.Equal(t, "abc", stdout.String())
|
|
|
|
}
func TestAccFsCatForDbfsOnNonExistentFile(t *testing.T) {
|
2023-06-05 23:16:23 +00:00
|
|
|
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
|
|
|
|
|
|
|
|
_, _, err := RequireErrorRun(t, "fs", "cat", "dbfs:/non-existent-file")
|
|
|
|
assert.ErrorIs(t, err, fs.ErrNotExist)
|
|
|
|
}
func TestAccFsCatForDbfsInvalidScheme(t *testing.T) {
|
2023-06-05 23:16:23 +00:00
|
|
|
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
|
|
|
|
|
|
|
|
_, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file")
|
2023-07-26 13:03:10 +00:00
|
|
|
assert.ErrorContains(t, err, "invalid scheme: dab")
|
2023-06-05 23:16:23 +00:00
|
|
|
}
func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) {
|
2023-06-05 23:16:23 +00:00
|
|
|
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
|
|
|
|
|
|
|
|
ctx := context.Background()
|
|
|
|
w, err := databricks.NewWorkspaceClient()
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
2023-10-03 11:18:55 +00:00
|
|
|
tmpDir := TemporaryDbfsDir(t, w)
|
2023-06-05 23:16:23 +00:00
|
|
|
|
|
|
|
f, err := filer.NewDbfsClient(w, tmpDir)
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
err = f.Write(ctx, "hello.txt", strings.NewReader("abc"))
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
_, _, err = RequireErrorRun(t, "fs", "cat", "dbfs:"+path.Join(tmpDir, "hello.txt"), "--output=json")
|
|
|
|
assert.ErrorContains(t, err, "json output not supported")
|
|
|
|
}
// TODO: Add a test asserting an error when cat is called on a directory. Need this to be
// fixed in the SDK first (https://github.com/databricks/databricks-sdk-go/issues/414)