Compare commits

1 Commit

Author          SHA1         Message                            Date
Andrew Nester   3c53a6e7cc   Merge 1c571962d2 into 1db384018c   2024-11-14 18:45:13 +00:00
15 changed files with 141 additions and 341 deletions

@@ -44,11 +44,6 @@ func (m *prependWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di
return dyn.InvalidValue, fmt.Errorf("expected string, got %s", v.Kind())
}
// Skip prefixing if the path does not start with /; it might be a variable reference or something else.
if !strings.HasPrefix(path, "/") {
return pv, nil
}
for _, prefix := range skipPrefixes {
if strings.HasPrefix(path, prefix) {
return pv, nil
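
For orientation, the skip logic in this hunk reduces to the sketch below. It is a minimal, self-contained illustration: the helper name and the exact contents of skipPrefixes are assumptions inferred from this hunk and the test cases in the next file, not the CLI's actual definitions.

package main

import (
	"fmt"
	"strings"
)

// Assumed prefix set; the real skipPrefixes is defined elsewhere in the mutator.
var skipPrefixes = []string{"/Workspace", "/Volumes"}

// shouldPrefix is a hypothetical helper mirroring the hunk above.
func shouldPrefix(path string) bool {
	// Relative paths may be variable references such as
	// "${workspace.file_path}/test" and are left untouched.
	if !strings.HasPrefix(path, "/") {
		return false
	}
	// Paths already under a known root are not prefixed again.
	for _, prefix := range skipPrefixes {
		if strings.HasPrefix(path, prefix) {
			return false
		}
	}
	return true
}

func main() {
	for _, p := range []string{"/Users/test", "/Volumes/Users/test", "~/test"} {
		fmt.Printf("%-22s prefix? %v\n", p, shouldPrefix(p))
	}
}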

@@ -31,14 +31,6 @@ func TestPrependWorkspacePrefix(t *testing.T) {
path: "/Volumes/Users/test",
expected: "/Volumes/Users/test",
},
{
path: "~/test",
expected: "~/test",
},
{
path: "${workspace.file_path}/test",
expected: "${workspace.file_path}/test",
},
}
for _, tc := range testCases {

@@ -3,10 +3,8 @@ package generate
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"testing"
@@ -215,119 +213,3 @@ func TestGenerateJobCommand(t *testing.T) {
require.NoError(t, err)
require.Equal(t, "# Databricks notebook source\nNotebook content", string(data))
}
func touchEmptyFile(t *testing.T, path string) {
err := os.MkdirAll(filepath.Dir(path), 0700)
require.NoError(t, err)
f, err := os.Create(path)
require.NoError(t, err)
f.Close()
}
func TestGenerateJobCommandOldFileRename(t *testing.T) {
cmd := NewGenerateJobCommand()
root := t.TempDir()
b := &bundle.Bundle{
BundleRootPath: root,
}
m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient)
jobsApi := m.GetMockJobsAPI()
jobsApi.EXPECT().Get(mock.Anything, jobs.GetJobRequest{JobId: 1234}).Return(&jobs.Job{
Settings: &jobs.JobSettings{
Name: "test-job",
JobClusters: []jobs.JobCluster{
{NewCluster: compute.ClusterSpec{
CustomTags: map[string]string{
"Tag1": "24X7-1234",
},
}},
{NewCluster: compute.ClusterSpec{
SparkConf: map[string]string{
"spark.databricks.delta.preview.enabled": "true",
},
}},
},
Tasks: []jobs.Task{
{
TaskKey: "notebook_task",
NotebookTask: &jobs.NotebookTask{
NotebookPath: "/test/notebook",
},
},
},
Parameters: []jobs.JobParameterDefinition{
{
Name: "empty",
Default: "",
},
},
},
}, nil)
workspaceApi := m.GetMockWorkspaceAPI()
workspaceApi.EXPECT().GetStatusByPath(mock.Anything, "/test/notebook").Return(&workspace.ObjectInfo{
ObjectType: workspace.ObjectTypeNotebook,
Language: workspace.LanguagePython,
Path: "/test/notebook",
}, nil)
notebookContent := io.NopCloser(bytes.NewBufferString("# Databricks notebook source\nNotebook content"))
workspaceApi.EXPECT().Download(mock.Anything, "/test/notebook", mock.Anything).Return(notebookContent, nil)
cmd.SetContext(bundle.Context(context.Background(), b))
cmd.Flag("existing-job-id").Value.Set("1234")
configDir := filepath.Join(root, "resources")
cmd.Flag("config-dir").Value.Set(configDir)
srcDir := filepath.Join(root, "src")
cmd.Flag("source-dir").Value.Set(srcDir)
var key string
cmd.Flags().StringVar(&key, "key", "test_job", "")
// Create an old generated file first
oldFilename := filepath.Join(configDir, "test_job.yml")
touchEmptyFile(t, oldFilename)
// Having existing files requires the --force flag to regenerate them
cmd.Flag("force").Value.Set("true")
err := cmd.RunE(cmd, []string{})
require.NoError(t, err)
// Make sure the old file does not exist after the run
_, err = os.Stat(oldFilename)
require.True(t, errors.Is(err, fs.ErrNotExist))
data, err := os.ReadFile(filepath.Join(configDir, "test_job.job.yml"))
require.NoError(t, err)
require.Equal(t, fmt.Sprintf(`resources:
jobs:
test_job:
name: test-job
job_clusters:
- new_cluster:
custom_tags:
"Tag1": "24X7-1234"
- new_cluster:
spark_conf:
"spark.databricks.delta.preview.enabled": "true"
tasks:
- task_key: notebook_task
notebook_task:
notebook_path: %s
parameters:
- name: empty
default: ""
`, filepath.Join("..", "src", "notebook.py")), string(data))
data, err = os.ReadFile(filepath.Join(srcDir, "notebook.py"))
require.NoError(t, err)
require.Equal(t, "# Databricks notebook source\nNotebook content", string(data))
}
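
For reference, the flow this test exercises corresponds roughly to the invocation below; the flag names are taken from the test body above, but the exact command syntax is an assumption rather than a transcript:

databricks bundle generate job --existing-job-id 1234 --key test_job --config-dir resources --source-dir src --force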

@@ -88,12 +88,9 @@ func NewGenerateJobCommand() *cobra.Command {
oldFilename := filepath.Join(configDir, fmt.Sprintf("%s.yml", jobKey))
filename := filepath.Join(configDir, fmt.Sprintf("%s.job.yml", jobKey))
// Users may rerun the generate command to pick up changes made to their bundle jobs in the Databricks UI.
// Because the generated file names changed, we must first rename the existing resource file to the new name.
// Otherwise users can end up with duplicated resources.
err = os.Rename(oldFilename, filename)
if err != nil && !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf("failed to rename file %s. DABs uses the resource type as a sub-extension for generated content, please rename it to %s, err: %w", oldFilename, filename, err)
return fmt.Errorf("failed to rename file %s. DABs uses resource type as sub extension for generated content, please rename to %s, err: %w", oldFilename, filename, err)
}
saver := yamlsaver.NewSaverWithStyle(map[string]yaml.Style{
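
The rename pattern in this hunk can be summarized by the sketch below, assuming hypothetical helper and variable names (the CLI inlines this logic rather than exposing such a function): rename the legacy <key>.yml to the sub-extension form <key>.job.yml, and tolerate fs.ErrNotExist so fresh generations, where no old file exists, proceed unaffected.

package generate

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
)

// migrateGeneratedFile is illustrative, not part of the CLI's API.
func migrateGeneratedFile(configDir, jobKey string) error {
	oldFilename := filepath.Join(configDir, jobKey+".yml")
	filename := filepath.Join(configDir, jobKey+".job.yml")
	err := os.Rename(oldFilename, filename)
	if err != nil && !errors.Is(err, fs.ErrNotExist) {
		// Any failure other than "the old file never existed" must surface,
		// since leaving both files behind duplicates the resource.
		return fmt.Errorf("failed to rename %s to %s: %w", oldFilename, filename, err)
	}
	return nil
}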

@@ -88,12 +88,9 @@ func NewGeneratePipelineCommand() *cobra.Command {
oldFilename := filepath.Join(configDir, fmt.Sprintf("%s.yml", pipelineKey))
filename := filepath.Join(configDir, fmt.Sprintf("%s.pipeline.yml", pipelineKey))
// Users may rerun the generate command to pick up changes made to their bundle pipelines in the Databricks UI.
// Because the generated file names changed, we must first rename the existing resource file to the new name.
// Otherwise users can end up with duplicated resources.
err = os.Rename(oldFilename, filename)
if err != nil && !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf("failed to rename file %s. DABs uses the resource type as a sub-extension for generated content, please rename it to %s, err: %w", oldFilename, filename, err)
return fmt.Errorf("failed to rename file %s. DABs uses resource type as sub extension for generated content, please rename to %s, err: %w", oldFilename, filename, err)
}
saver := yamlsaver.NewSaverWithStyle(

@@ -7,7 +7,6 @@ import (
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/fakefs"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/spf13/cobra"
@@ -85,7 +84,7 @@ func setupTest(t *testing.T) (*validArgs, *cobra.Command, *mocks.MockWorkspaceCl
cmd, m := setupCommand(t)
fakeFilerForPath := func(ctx context.Context, fullPath string) (filer.Filer, string, error) {
fakeFiler := filer.NewFakeFiler(map[string]fakefs.FileInfo{
fakeFiler := filer.NewFakeFiler(map[string]filer.FakeFileInfo{
"dir": {FakeName: "root", FakeDir: true},
"dir/dirA": {FakeDir: true},
"dir/dirB": {FakeDir: true},

@@ -166,7 +166,7 @@ func TestAccGenerateAndBind(t *testing.T) {
_, err = os.Stat(filepath.Join(bundleRoot, "src", "test.py"))
require.NoError(t, err)
matches, err := filepath.Glob(filepath.Join(bundleRoot, "resources", "test_job_key.job.yml"))
matches, err := filepath.Glob(filepath.Join(bundleRoot, "resources", "test_job_key.yml"))
require.NoError(t, err)
require.Len(t, matches, 1)

@@ -1,21 +1,18 @@
package fakefs
import (
"fmt"
"io/fs"
"time"
)
var ErrNotImplemented = fmt.Errorf("not implemented")
// DirEntry is a fake implementation of [fs.DirEntry].
type DirEntry struct {
fs.FileInfo
FileInfo
}
func (entry DirEntry) Type() fs.FileMode {
typ := fs.ModePerm
if entry.IsDir() {
if entry.FakeDir {
typ |= fs.ModeDir
}
return typ
@@ -56,32 +53,3 @@ func (info FileInfo) IsDir() bool {
func (info FileInfo) Sys() any {
return nil
}
// File is a fake implementation of [fs.File].
type File struct {
FileInfo fs.FileInfo
}
func (f File) Close() error {
return nil
}
func (f File) Read(p []byte) (n int, err error) {
return 0, ErrNotImplemented
}
func (f File) Stat() (fs.FileInfo, error) {
return f.FileInfo, nil
}
// FS is a fake implementation of [fs.FS].
type FS map[string]fs.File
func (f FS) Open(name string) (fs.File, error) {
e, ok := f[name]
if !ok {
return nil, fs.ErrNotExist
}
return e, nil
}
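
A short usage sketch of the slimmed-down fakefs types (the construction below is illustrative, based on the fields visible in this hunk): DirEntry now embeds the concrete FileInfo, so its mode bits derive from FakeDir rather than from a wrapped fs.FileInfo.

package main

import (
	"fmt"
	"io/fs"

	"github.com/databricks/cli/libs/fakefs"
)

func main() {
	entry := fakefs.DirEntry{FileInfo: fakefs.FileInfo{FakeName: "dir", FakeDir: true}}
	// IsDir is promoted from the embedded FileInfo; Type reflects FakeDir.
	fmt.Println(entry.IsDir(), entry.Type()&fs.ModeDir != 0) // true true
}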

@@ -1,38 +0,0 @@
package fakefs
import (
"io/fs"
"testing"
"github.com/stretchr/testify/assert"
)
func TestFile(t *testing.T) {
var fakefile fs.File = File{
FileInfo: FileInfo{
FakeName: "file",
},
}
_, err := fakefile.Read([]byte{})
assert.ErrorIs(t, err, ErrNotImplemented)
fi, err := fakefile.Stat()
assert.NoError(t, err)
assert.Equal(t, "file", fi.Name())
err = fakefile.Close()
assert.NoError(t, err)
}
func TestFS(t *testing.T) {
var fakefs fs.FS = FS{
"file": File{},
}
_, err := fakefs.Open("doesntexist")
assert.ErrorIs(t, err, fs.ErrNotExist)
_, err = fakefs.Open("file")
assert.NoError(t, err)
}

@@ -6,7 +6,6 @@ import (
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/fakefs"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/spf13/cobra"
@@ -18,7 +17,7 @@ func setupCompleter(t *testing.T, onlyDirs bool) *completer {
// Needed to make type context.valueCtx for mockFilerForPath
ctx = root.SetWorkspaceClient(ctx, mocks.NewMockWorkspaceClient(t).WorkspaceClient)
fakeFiler := filer.NewFakeFiler(map[string]fakefs.FileInfo{
fakeFiler := filer.NewFakeFiler(map[string]filer.FakeFileInfo{
"dir": {FakeName: "root", FakeDir: true},
"dir/dirA": {FakeDir: true},
"dir/dirB": {FakeDir: true},

@@ -8,12 +8,58 @@ import (
"path"
"sort"
"strings"
"github.com/databricks/cli/libs/fakefs"
"time"
)
type FakeDirEntry struct {
FakeFileInfo
}
func (entry FakeDirEntry) Type() fs.FileMode {
typ := fs.ModePerm
if entry.FakeDir {
typ |= fs.ModeDir
}
return typ
}
func (entry FakeDirEntry) Info() (fs.FileInfo, error) {
return entry.FakeFileInfo, nil
}
type FakeFileInfo struct {
FakeName string
FakeSize int64
FakeDir bool
FakeMode fs.FileMode
}
func (info FakeFileInfo) Name() string {
return info.FakeName
}
func (info FakeFileInfo) Size() int64 {
return info.FakeSize
}
func (info FakeFileInfo) Mode() fs.FileMode {
return info.FakeMode
}
func (info FakeFileInfo) ModTime() time.Time {
return time.Now()
}
func (info FakeFileInfo) IsDir() bool {
return info.FakeDir
}
func (info FakeFileInfo) Sys() any {
return nil
}
type FakeFiler struct {
entries map[string]fakefs.FileInfo
entries map[string]FakeFileInfo
}
func (f *FakeFiler) Write(ctx context.Context, p string, reader io.Reader, mode ...WriteMode) error {
@@ -51,7 +97,7 @@ func (f *FakeFiler) ReadDir(ctx context.Context, p string) ([]fs.DirEntry, error
continue
}
out = append(out, fakefs.DirEntry{FileInfo: v})
out = append(out, FakeDirEntry{v})
}
sort.Slice(out, func(i, j int) bool { return out[i].Name() < out[j].Name() })
@@ -71,11 +117,7 @@ func (f *FakeFiler) Stat(ctx context.Context, path string) (fs.FileInfo, error)
return entry, nil
}
// NewFakeFiler creates a new fake [Filer] instance with the given entries.
// It sets the [Name] field of each entry to the base name of the path.
//
// This is meant to be used in tests.
func NewFakeFiler(entries map[string]fakefs.FileInfo) *FakeFiler {
func NewFakeFiler(entries map[string]FakeFileInfo) *FakeFiler {
fakeFiler := &FakeFiler{
entries: entries,
}
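
With FakeFileInfo now defined in the filer package itself, a test constructs entries directly. The sketch below is illustrative, modeled on the removed fakefs-based tests, and assumes the constructor still fills each entry's name from the map key's base name as the removed doc comment described.

package filer

import (
	"context"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestFakeFilerSketch(t *testing.T) {
	f := NewFakeFiler(map[string]FakeFileInfo{
		"dir":       {FakeDir: true},
		"dir/a.txt": {},
	})
	entries, err := f.ReadDir(context.Background(), "dir")
	require.NoError(t, err)
	require.Len(t, entries, 1)
	// Name is derived from the map key's base name.
	require.Equal(t, "a.txt", entries[0].Name())
}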

@@ -1,98 +0,0 @@
package filer
import (
"context"
"io"
"io/fs"
"testing"
"github.com/databricks/cli/libs/fakefs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestFakeFiler_Read(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"file": {},
})
ctx := context.Background()
r, err := f.Read(ctx, "file")
require.NoError(t, err)
contents, err := io.ReadAll(r)
require.NoError(t, err)
// The contents of every file are "foo".
assert.Equal(t, "foo", string(contents))
}
func TestFakeFiler_Read_NotFound(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"foo": {},
})
ctx := context.Background()
_, err := f.Read(ctx, "bar")
assert.ErrorIs(t, err, fs.ErrNotExist)
}
func TestFakeFiler_ReadDir_NotFound(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"dir1": {FakeDir: true},
})
ctx := context.Background()
_, err := f.ReadDir(ctx, "dir2")
assert.ErrorIs(t, err, fs.ErrNotExist)
}
func TestFakeFiler_ReadDir_NotADirectory(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"file": {},
})
ctx := context.Background()
_, err := f.ReadDir(ctx, "file")
assert.ErrorIs(t, err, fs.ErrInvalid)
}
func TestFakeFiler_ReadDir(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"dir1": {FakeDir: true},
"dir1/file2": {},
"dir1/dir2": {FakeDir: true},
})
ctx := context.Background()
entries, err := f.ReadDir(ctx, "dir1/")
require.NoError(t, err)
require.Len(t, entries, 2)
// The entries are sorted by name.
assert.Equal(t, "dir2", entries[0].Name())
assert.True(t, entries[0].IsDir())
assert.Equal(t, "file2", entries[1].Name())
assert.False(t, entries[1].IsDir())
}
func TestFakeFiler_Stat(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"file": {},
})
ctx := context.Background()
info, err := f.Stat(ctx, "file")
require.NoError(t, err)
assert.Equal(t, "file", info.Name())
}
func TestFakeFiler_Stat_NotFound(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"foo": {},
})
ctx := context.Background()
_, err := f.Stat(ctx, "bar")
assert.ErrorIs(t, err, fs.ErrNotExist)
}

@@ -6,7 +6,6 @@ import (
"io/fs"
"testing"
"github.com/databricks/cli/libs/fakefs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -36,7 +35,7 @@ func TestFsDirImplementsFsReadDirFile(t *testing.T) {
}
func fakeFS() fs.FS {
fakeFiler := NewFakeFiler(map[string]fakefs.FileInfo{
fakeFiler := NewFakeFiler(map[string]FakeFileInfo{
".": {FakeName: "root", FakeDir: true},
"dirA": {FakeDir: true},
"dirB": {FakeDir: true},

@@ -7,7 +7,6 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/libs/fakefs"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -101,21 +100,11 @@ func TestDetectFileWithLongHeader(t *testing.T) {
assert.False(t, nb)
}
type fileInfoWithWorkspaceInfo struct {
fakefs.FileInfo
oi workspace.ObjectInfo
}
func (f fileInfoWithWorkspaceInfo) WorkspaceObjectInfo() workspace.ObjectInfo {
return f.oi
}
func TestDetectWithObjectInfo(t *testing.T) {
fakefs := fakefs.FS{
"file.py": fakefs.File{
FileInfo: fileInfoWithWorkspaceInfo{
oi: workspace.ObjectInfo{
fakeFS := &fakeFS{
fakeFile{
fakeFileInfo{
workspace.ObjectInfo{
ObjectType: workspace.ObjectTypeNotebook,
Language: workspace.LanguagePython,
},
@@ -123,7 +112,7 @@ func TestDetectWithObjectInfo(t *testing.T) {
},
}
nb, lang, err := DetectWithFS(fakefs, "file.py")
nb, lang, err := DetectWithFS(fakeFS, "doesntmatter")
require.NoError(t, err)
assert.True(t, nb)
assert.Equal(t, workspace.LanguagePython, lang)

@@ -0,0 +1,77 @@
package notebook
import (
"fmt"
"io/fs"
"time"
"github.com/databricks/databricks-sdk-go/service/workspace"
)
type fakeFS struct {
fakeFile
}
type fakeFile struct {
fakeFileInfo
}
func (f fakeFile) Close() error {
return nil
}
func (f fakeFile) Read(p []byte) (n int, err error) {
return 0, fmt.Errorf("not implemented")
}
func (f fakeFile) Stat() (fs.FileInfo, error) {
return f.fakeFileInfo, nil
}
type fakeFileInfo struct {
oi workspace.ObjectInfo
}
func (f fakeFileInfo) WorkspaceObjectInfo() workspace.ObjectInfo {
return f.oi
}
func (f fakeFileInfo) Name() string {
return ""
}
func (f fakeFileInfo) Size() int64 {
return 0
}
func (f fakeFileInfo) Mode() fs.FileMode {
return 0
}
func (f fakeFileInfo) ModTime() time.Time {
return time.Time{}
}
func (f fakeFileInfo) IsDir() bool {
return false
}
func (f fakeFileInfo) Sys() any {
return nil
}
func (f fakeFS) Open(name string) (fs.File, error) {
return f.fakeFile, nil
}
func (f fakeFS) Stat(name string) (fs.FileInfo, error) {
panic("not implemented")
}
func (f fakeFS) ReadDir(name string) ([]fs.DirEntry, error) {
panic("not implemented")
}
func (f fakeFS) ReadFile(name string) ([]byte, error) {
panic("not implemented")
}
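
A hedged sketch of how this fake is consumed: DetectWithFS opens the file and stats it, and when the resulting fs.FileInfo also exposes WorkspaceObjectInfo, it can presumably decide based on the object type instead of sniffing the file header. A caller shaped like the updated test:

package notebook

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/workspace"
)

// detectWithFakeFS is illustrative only; it mirrors TestDetectWithObjectInfo above.
func detectWithFakeFS() {
	f := &fakeFS{
		fakeFile{
			fakeFileInfo{
				oi: workspace.ObjectInfo{
					ObjectType: workspace.ObjectTypeNotebook,
					Language:   workspace.LanguagePython,
				},
			},
		},
	}
	nb, lang, err := DetectWithFS(f, "doesntmatter")
	// Expect nb == true, lang == workspace.LanguagePython, err == nil.
	fmt.Println(nb, lang, err)
}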