Print text logs in `import-dir` and `export-dir` commands (#1682)

## Changes
In https://github.com/databricks/cli/pull/1202 the semantics of
`cmdio.RenderJson` were changed to always render the JSON object. Previously,
the JSON object was only rendered when `--output json` was specified.

This PR fixes the `import-dir` and `export-dir` commands to print human-readable
log lines instead of a JSON object.
It also removes the now unused `cmdio.RenderJson` method.
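
For reference, a minimal sketch of the call-site change, in the spirit of the diffs below. The event struct, field name, and wrapper function are illustrative stand-ins (the real `newExportStartedEvent` lives in the CLI), and the sketch assumes `cmdio.RenderWithTemplate` respects the configured output format the same way `RenderIteratorWithTemplate` does:

```go
package example

import (
	"context"

	"github.com/databricks/cli/libs/cmdio"
)

// exportStartedEvent is an illustrative stand-in for the CLI's real event type.
type exportStartedEvent struct {
	SourcePath string `json:"source_path"`
}

func renderStart(ctx context.Context, sourceDir string) error {
	event := exportStartedEvent{SourcePath: sourceDir}

	// Before: after #1202 this always printed the event as a JSON object,
	// regardless of the output flag.
	// err := cmdio.RenderJson(ctx, event)

	// After: render a human-readable line from a Go template; the template
	// fields ({{.SourcePath}}) resolve against the event value.
	return cmdio.RenderWithTemplate(ctx, event, "", "Exporting files from {{.SourcePath}}\n")
}
```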

## Tests
Manually:
```
➜  bundle-playground git:(master) ✗ cli workspace import-dir ./tmp /Users/shreyas.goenka@databricks.com/test-import-1 -p aws-prod-ucws
Importing files from ./tmp
a -> /Users/shreyas.goenka@databricks.com/test-import-1/a
Import complete. The files are available at /Users/shreyas.goenka@databricks.com/test-import-1
```
```
➜  bundle-playground git:(master) ✗ cli workspace export-dir  /Users/shreyas.goenka@databricks.com/test-export-1 ./tmp-2 -p aws-prod-ucws
Exporting files from /Users/shreyas.goenka@databricks.com/test-export-1
/Users/shreyas.goenka@databricks.com/test-export-1/b -> tmp-2/b
Exported complete. The files are available at ./tmp-2
```
Commit a6eb673d55 (parent 6b3d33a846) by shreyas-goenka, 2024-08-15 18:23:02 +05:30, committed via GitHub.
4 changed files with 39 additions and 32 deletions


```diff
@@ -110,8 +110,7 @@ func newExportDir() *cobra.Command {
 		}
 		workspaceFS := filer.NewFS(ctx, workspaceFiler)
 		// TODO: print progress events on stderr instead: https://github.com/databricks/cli/issues/448
-		err = cmdio.RenderJson(ctx, newExportStartedEvent(opts.sourceDir))
+		err = cmdio.RenderWithTemplate(ctx, newExportStartedEvent(opts.sourceDir), "", "Exporting files from {{.SourcePath}}\n")
 		if err != nil {
 			return err
 		}
@@ -120,7 +119,7 @@ func newExportDir() *cobra.Command {
 		if err != nil {
 			return err
 		}
-		return cmdio.RenderJson(ctx, newExportCompletedEvent(opts.targetDir))
+		return cmdio.RenderWithTemplate(ctx, newExportCompletedEvent(opts.targetDir), "", "Export complete\n")
 	}
 	return cmd
```


```diff
@@ -134,8 +134,7 @@ Notebooks will have their extensions (one of .scala, .py, .sql, .ipynb, .r) stri
 			return err
 		}
 		// TODO: print progress events on stderr instead: https://github.com/databricks/cli/issues/448
-		err = cmdio.RenderJson(ctx, newImportStartedEvent(opts.sourceDir))
+		err = cmdio.RenderWithTemplate(ctx, newImportStartedEvent(opts.sourceDir), "", "Importing files from {{.SourcePath}}\n")
 		if err != nil {
 			return err
 		}
@@ -145,7 +144,7 @@ Notebooks will have their extensions (one of .scala, .py, .sql, .ipynb, .r) stri
 		if err != nil {
 			return err
 		}
-		return cmdio.RenderJson(ctx, newImportCompletedEvent(opts.targetDir))
+		return cmdio.RenderWithTemplate(ctx, newImportCompletedEvent(opts.targetDir), "", "Import complete\n")
 	}
 	return cmd
```


```
@@ -3,18 +3,17 @@ package internal
import (
	"context"
	"encoding/base64"
	"errors"
	"fmt"
	"io"
	"net/http"
	"os"
	"path"
	"path/filepath"
	"strings"
	"testing"
	"github.com/databricks/cli/internal/acc"
	"github.com/databricks/cli/libs/filer"
	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/apierr"
	"github.com/databricks/databricks-sdk-go/service/workspace"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
@@ -63,21 +62,12 @@ func TestAccWorkpaceExportPrintsContents(t *testing.T) {
}
func setupWorkspaceImportExportTest(t *testing.T) (context.Context, filer.Filer, string) {
	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
	ctx, wt := acc.WorkspaceTest(t)
	ctx := context.Background()
	w := databricks.Must(databricks.NewWorkspaceClient())
	tmpdir := TemporaryWorkspaceDir(t, w)
	f, err := filer.NewWorkspaceFilesClient(w, tmpdir)
	tmpdir := TemporaryWorkspaceDir(t, wt.W)
	f, err := filer.NewWorkspaceFilesClient(wt.W, tmpdir)
	require.NoError(t, err)
	// Check if we can use this API here, skip test if we cannot.
	_, err = f.Read(ctx, "we_use_this_call_to_test_if_this_api_is_enabled")
	var aerr *apierr.APIError
	if errors.As(err, &aerr) && aerr.StatusCode == http.StatusBadRequest {
		t.Skip(aerr.Message)
	}
	return ctx, f, tmpdir
}
@@ -122,8 +112,21 @@ func TestAccExportDir(t *testing.T) {
	err = f.Write(ctx, "a/b/c/file-b", strings.NewReader("def"), filer.CreateParentDirectories)
	require.NoError(t, err)
	expectedLogs := strings.Join([]string{
		fmt.Sprintf("Exporting files from %s", sourceDir),
		fmt.Sprintf("%s -> %s", path.Join(sourceDir, "a/b/c/file-b"), filepath.Join(targetDir, "a/b/c/file-b")),
		fmt.Sprintf("%s -> %s", path.Join(sourceDir, "file-a"), filepath.Join(targetDir, "file-a")),
		fmt.Sprintf("%s -> %s", path.Join(sourceDir, "pyNotebook"), filepath.Join(targetDir, "pyNotebook.py")),
		fmt.Sprintf("%s -> %s", path.Join(sourceDir, "rNotebook"), filepath.Join(targetDir, "rNotebook.r")),
		fmt.Sprintf("%s -> %s", path.Join(sourceDir, "scalaNotebook"), filepath.Join(targetDir, "scalaNotebook.scala")),
		fmt.Sprintf("%s -> %s", path.Join(sourceDir, "sqlNotebook"), filepath.Join(targetDir, "sqlNotebook.sql")),
		"Export complete\n",
	}, "\n")
	// Run Export
	RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir)
	stdout, stderr := RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir)
	assert.Equal(t, expectedLogs, stdout.String())
	assert.Equal(t, "", stderr.String())
	// Assert files were exported
	assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "abc")
@@ -176,10 +179,24 @@ func TestAccExportDirWithOverwriteFlag(t *testing.T) {
	assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "content from workspace")
}
// TODO: Add assertions on progress logs for workspace import-dir command. https://github.com/databricks/cli/issues/455
func TestAccImportDir(t *testing.T) {
	ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t)
	RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--log-level=debug")
	stdout, stderr := RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--log-level=debug")
	expectedLogs := strings.Join([]string{
		fmt.Sprintf("Importing files from %s", "./testdata/import_dir"),
		fmt.Sprintf("%s -> %s", filepath.FromSlash("a/b/c/file-b"), path.Join(targetDir, "a/b/c/file-b")),
		fmt.Sprintf("%s -> %s", filepath.FromSlash("file-a"), path.Join(targetDir, "file-a")),
		fmt.Sprintf("%s -> %s", filepath.FromSlash("jupyterNotebook.ipynb"), path.Join(targetDir, "jupyterNotebook")),
		fmt.Sprintf("%s -> %s", filepath.FromSlash("pyNotebook.py"), path.Join(targetDir, "pyNotebook")),
		fmt.Sprintf("%s -> %s", filepath.FromSlash("rNotebook.r"), path.Join(targetDir, "rNotebook")),
		fmt.Sprintf("%s -> %s", filepath.FromSlash("scalaNotebook.scala"), path.Join(targetDir, "scalaNotebook")),
		fmt.Sprintf("%s -> %s", filepath.FromSlash("sqlNotebook.sql"), path.Join(targetDir, "sqlNotebook")),
		"Import complete\n",
	}, "\n")
	assert.Equal(t, expectedLogs, stdout.String())
	assert.Equal(t, "", stderr.String())
	// Assert files are imported
	assertFilerFileContents(t, ctx, workspaceFiler, "file-a", "hello, world")
```


```diff
@@ -280,14 +280,6 @@ func RenderIteratorWithTemplate[T any](ctx context.Context, i listing.Iterator[T
 	return renderWithTemplate(newIteratorRenderer(i), ctx, c.outputFormat, c.out, headerTemplate, template)
 }
-func RenderJson(ctx context.Context, v any) error {
-	c := fromContext(ctx)
-	if _, ok := v.(listingInterface); ok {
-		panic("use RenderIteratorJson instead")
-	}
-	return renderWithTemplate(newRenderer(v), ctx, flags.OutputJSON, c.out, c.headerTemplate, c.template)
-}
 func RenderIteratorJson[T any](ctx context.Context, i listing.Iterator[T]) error {
 	c := fromContext(ctx)
 	return renderWithTemplate(newIteratorRenderer(i), ctx, c.outputFormat, c.out, c.headerTemplate, c.template)
```