Compare commits

...

8 Commits

Author SHA1 Message Date
Andrew Nester b7b49c75ea
Merge fdc6edf1ea into 7d732ceba8 2024-11-15 16:11:24 +00:00
Pieter Noordhuis 7d732ceba8
Consolidate test helpers for `io/fs` (#1906)
## Changes

We had a number of copies of test helpers for `io/fs` in the repository.

This change consolidates all of them to use the `libs/fakefs` package.

## Tests

Unit tests pass.
2024-11-15 15:37:21 +00:00
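A minimal sketch of the consolidated helpers in use (the test itself is illustrative, not part of this change), based on the `fakefs.FileInfo` and `fakefs.DirEntry` types in the file diffs below:

```go
package fakefs_test

import (
	"io/fs"
	"testing"

	"github.com/databricks/cli/libs/fakefs"
	"github.com/stretchr/testify/assert"
)

func TestSharedHelpers(t *testing.T) {
	// One shared fake fs.FileInfo instead of per-package copies.
	info := fakefs.FileInfo{FakeName: "root", FakeDir: true}
	var _ fs.FileInfo = info

	// DirEntry wraps any fs.FileInfo, mirroring how the filer fake uses it.
	entry := fakefs.DirEntry{FileInfo: info}
	assert.Equal(t, "root", entry.Name())
	assert.True(t, entry.IsDir())
}
```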
Andrew Nester 7f3fb10c4a
Do not prepend paths starting with ~ or variable reference (#1905)
## Changes
Fixes #1904 

## Tests
Added regression test
2024-11-15 15:03:59 +00:00
Andrew Nester fdc6edf1ea
fixed integ test 2024-11-15 10:30:11 +01:00
Andrew Nester ae59d07199
fixes 2024-11-15 10:26:47 +01:00
Andrew Nester e6299493f8
Update cmd/bundle/generate/job.go
Co-authored-by: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com>
2024-11-15 10:13:08 +01:00
Andrew Nester 1c571962d2
fixes 2024-11-14 13:46:21 +01:00
Andrew Nester 9cea93d159
Update filenames used by bundle generate to use '.<resource-type>.yml' 2024-11-14 13:42:50 +01:00
15 changed files with 361 additions and 143 deletions

View File

@@ -44,6 +44,11 @@ func (m *prependWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di
return dyn.InvalidValue, fmt.Errorf("expected string, got %s", v.Kind())
}
// Skip prefixing if the path does not start with /; it might be a variable reference or something else.
if !strings.HasPrefix(path, "/") {
return pv, nil
}
for _, prefix := range skipPrefixes {
if strings.HasPrefix(path, prefix) {
return pv, nil
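As a standalone illustration of the rule this hunk implements (the helper name and prefix list below are illustrative, not the mutator's actual API):

```go
package main

import (
	"fmt"
	"strings"
)

// shouldPrepend mirrors the check above: only absolute paths that are not
// already under a known prefix get the workspace prefix prepended.
func shouldPrepend(path string, skipPrefixes []string) bool {
	// "~/test" and "${workspace.file_path}/test" do not start with "/".
	if !strings.HasPrefix(path, "/") {
		return false
	}
	for _, prefix := range skipPrefixes {
		if strings.HasPrefix(path, prefix) {
			return false
		}
	}
	return true
}

func main() {
	skip := []string{"/Workspace/", "/Volumes/"} // illustrative prefix list
	for _, p := range []string{
		"/Users/someone@example.com/test", // prepended
		"/Volumes/Users/test",             // skipped: known prefix
		"~/test",                          // skipped: not absolute
		"${workspace.file_path}/test",     // skipped: variable reference
	} {
		fmt.Printf("%-35s prepend=%v\n", p, shouldPrepend(p, skip))
	}
}
```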

View File

@@ -31,6 +31,14 @@ func TestPrependWorkspacePrefix(t *testing.T) {
path: "/Volumes/Users/test",
expected: "/Volumes/Users/test",
},
{
path: "~/test",
expected: "~/test",
},
{
path: "${workspace.file_path}/test",
expected: "${workspace.file_path}/test",
},
}
for _, tc := range testCases {

View File

@@ -3,8 +3,10 @@ package generate
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"testing"
@@ -90,7 +92,7 @@ func TestGeneratePipelineCommand(t *testing.T) {
err := cmd.RunE(cmd, []string{})
require.NoError(t, err)
data, err := os.ReadFile(filepath.Join(configDir, "test_pipeline.yml"))
data, err := os.ReadFile(filepath.Join(configDir, "test_pipeline.pipeline.yml"))
require.NoError(t, err)
require.Equal(t, fmt.Sprintf(`resources:
pipelines:
@@ -186,7 +188,123 @@ func TestGenerateJobCommand(t *testing.T) {
err := cmd.RunE(cmd, []string{})
require.NoError(t, err)
data, err := os.ReadFile(filepath.Join(configDir, "test_job.yml"))
data, err := os.ReadFile(filepath.Join(configDir, "test_job.job.yml"))
require.NoError(t, err)
require.Equal(t, fmt.Sprintf(`resources:
jobs:
test_job:
name: test-job
job_clusters:
- new_cluster:
custom_tags:
"Tag1": "24X7-1234"
- new_cluster:
spark_conf:
"spark.databricks.delta.preview.enabled": "true"
tasks:
- task_key: notebook_task
notebook_task:
notebook_path: %s
parameters:
- name: empty
default: ""
`, filepath.Join("..", "src", "notebook.py")), string(data))
data, err = os.ReadFile(filepath.Join(srcDir, "notebook.py"))
require.NoError(t, err)
require.Equal(t, "# Databricks notebook source\nNotebook content", string(data))
}
func touchEmptyFile(t *testing.T, path string) {
err := os.MkdirAll(filepath.Dir(path), 0700)
require.NoError(t, err)
f, err := os.Create(path)
require.NoError(t, err)
f.Close()
}
func TestGenerateJobCommandOldFileRename(t *testing.T) {
cmd := NewGenerateJobCommand()
root := t.TempDir()
b := &bundle.Bundle{
BundleRootPath: root,
}
m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient)
jobsApi := m.GetMockJobsAPI()
jobsApi.EXPECT().Get(mock.Anything, jobs.GetJobRequest{JobId: 1234}).Return(&jobs.Job{
Settings: &jobs.JobSettings{
Name: "test-job",
JobClusters: []jobs.JobCluster{
{NewCluster: compute.ClusterSpec{
CustomTags: map[string]string{
"Tag1": "24X7-1234",
},
}},
{NewCluster: compute.ClusterSpec{
SparkConf: map[string]string{
"spark.databricks.delta.preview.enabled": "true",
},
}},
},
Tasks: []jobs.Task{
{
TaskKey: "notebook_task",
NotebookTask: &jobs.NotebookTask{
NotebookPath: "/test/notebook",
},
},
},
Parameters: []jobs.JobParameterDefinition{
{
Name: "empty",
Default: "",
},
},
},
}, nil)
workspaceApi := m.GetMockWorkspaceAPI()
workspaceApi.EXPECT().GetStatusByPath(mock.Anything, "/test/notebook").Return(&workspace.ObjectInfo{
ObjectType: workspace.ObjectTypeNotebook,
Language: workspace.LanguagePython,
Path: "/test/notebook",
}, nil)
notebookContent := io.NopCloser(bytes.NewBufferString("# Databricks notebook source\nNotebook content"))
workspaceApi.EXPECT().Download(mock.Anything, "/test/notebook", mock.Anything).Return(notebookContent, nil)
cmd.SetContext(bundle.Context(context.Background(), b))
cmd.Flag("existing-job-id").Value.Set("1234")
configDir := filepath.Join(root, "resources")
cmd.Flag("config-dir").Value.Set(configDir)
srcDir := filepath.Join(root, "src")
cmd.Flag("source-dir").Value.Set(srcDir)
var key string
cmd.Flags().StringVar(&key, "key", "test_job", "")
// Create an old generated file first
oldFilename := filepath.Join(configDir, "test_job.yml")
touchEmptyFile(t, oldFilename)
// Existing files require the --force flag to regenerate them
cmd.Flag("force").Value.Set("true")
err := cmd.RunE(cmd, []string{})
require.NoError(t, err)
// Make sure the old file does not exist after the run
_, err = os.Stat(oldFilename)
require.True(t, errors.Is(err, fs.ErrNotExist))
data, err := os.ReadFile(filepath.Join(configDir, "test_job.job.yml"))
require.NoError(t, err)
require.Equal(t, fmt.Sprintf(`resources:

View File

@@ -1,7 +1,9 @@
package generate
import (
"errors"
"fmt"
"io/fs"
"os"
"path/filepath"
@@ -83,7 +85,17 @@ func NewGenerateJobCommand() *cobra.Command {
return err
}
filename := filepath.Join(configDir, fmt.Sprintf("%s.yml", jobKey))
oldFilename := filepath.Join(configDir, fmt.Sprintf("%s.yml", jobKey))
filename := filepath.Join(configDir, fmt.Sprintf("%s.job.yml", jobKey))
// Users might run the generate command repeatedly to update their bundle jobs with changes made in the Databricks UI.
// Because the generated file names have changed, we must first rename an existing resource file to the new name.
// Otherwise, users can end up with duplicated resources.
err = os.Rename(oldFilename, filename)
if err != nil && !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf("failed to rename file %s. DABs uses the resource type as a sub-extension for generated content, please rename it to %s, err: %w", oldFilename, filename, err)
}
saver := yamlsaver.NewSaverWithStyle(map[string]yaml.Style{
// Including all JobSettings and nested fields of type map[string]string
"spark_conf": yaml.DoubleQuotedStyle,

View File

@@ -1,7 +1,9 @@
package generate
import (
"errors"
"fmt"
"io/fs"
"os"
"path/filepath"
@@ -83,7 +85,17 @@ func NewGeneratePipelineCommand() *cobra.Command {
return err
}
filename := filepath.Join(configDir, fmt.Sprintf("%s.yml", pipelineKey))
oldFilename := filepath.Join(configDir, fmt.Sprintf("%s.yml", pipelineKey))
filename := filepath.Join(configDir, fmt.Sprintf("%s.pipeline.yml", pipelineKey))
// Users might run the generate command repeatedly to update their bundle pipelines with changes made in the Databricks UI.
// Because the generated file names have changed, we must first rename an existing resource file to the new name.
// Otherwise, users can end up with duplicated resources.
err = os.Rename(oldFilename, filename)
if err != nil && !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf("failed to rename file %s. DABs uses the resource type as a sub-extension for generated content, please rename it to %s, err: %w", oldFilename, filename, err)
}
saver := yamlsaver.NewSaverWithStyle(
// Including all PipelineSpec and nested fields of type map[string]string
map[string]yaml.Style{

View File

@@ -7,6 +7,7 @@ import (
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/fakefs"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/spf13/cobra"
@@ -84,7 +85,7 @@ func setupTest(t *testing.T) (*validArgs, *cobra.Command, *mocks.MockWorkspaceCl
cmd, m := setupCommand(t)
fakeFilerForPath := func(ctx context.Context, fullPath string) (filer.Filer, string, error) {
fakeFiler := filer.NewFakeFiler(map[string]filer.FakeFileInfo{
fakeFiler := filer.NewFakeFiler(map[string]fakefs.FileInfo{
"dir": {FakeName: "root", FakeDir: true},
"dir/dirA": {FakeDir: true},
"dir/dirB": {FakeDir: true},

View File

@@ -166,7 +166,7 @@ func TestAccGenerateAndBind(t *testing.T) {
_, err = os.Stat(filepath.Join(bundleRoot, "src", "test.py"))
require.NoError(t, err)
matches, err := filepath.Glob(filepath.Join(bundleRoot, "resources", "test_job_key.yml"))
matches, err := filepath.Glob(filepath.Join(bundleRoot, "resources", "test_job_key.job.yml"))
require.NoError(t, err)
require.Len(t, matches, 1)

View File

@@ -1,18 +1,21 @@
package fakefs
import (
"fmt"
"io/fs"
"time"
)
var ErrNotImplemented = fmt.Errorf("not implemented")
// DirEntry is a fake implementation of [fs.DirEntry].
type DirEntry struct {
FileInfo
fs.FileInfo
}
func (entry DirEntry) Type() fs.FileMode {
typ := fs.ModePerm
if entry.FakeDir {
if entry.IsDir() {
typ |= fs.ModeDir
}
return typ
@@ -53,3 +56,32 @@ func (info FileInfo) IsDir() bool {
func (info FileInfo) Sys() any {
return nil
}
// File is a fake implementation of [fs.File].
type File struct {
FileInfo fs.FileInfo
}
func (f File) Close() error {
return nil
}
func (f File) Read(p []byte) (n int, err error) {
return 0, ErrNotImplemented
}
func (f File) Stat() (fs.FileInfo, error) {
return f.FileInfo, nil
}
// FS is a fake implementation of [fs.FS].
type FS map[string]fs.File
func (f FS) Open(name string) (fs.File, error) {
e, ok := f[name]
if !ok {
return nil, fs.ErrNotExist
}
return e, nil
}

View File

@@ -0,0 +1,38 @@
package fakefs
import (
"io/fs"
"testing"
"github.com/stretchr/testify/assert"
)
func TestFile(t *testing.T) {
var fakefile fs.File = File{
FileInfo: FileInfo{
FakeName: "file",
},
}
_, err := fakefile.Read([]byte{})
assert.ErrorIs(t, err, ErrNotImplemented)
fi, err := fakefile.Stat()
assert.NoError(t, err)
assert.Equal(t, "file", fi.Name())
err = fakefile.Close()
assert.NoError(t, err)
}
func TestFS(t *testing.T) {
var fakefs fs.FS = FS{
"file": File{},
}
_, err := fakefs.Open("doesntexist")
assert.ErrorIs(t, err, fs.ErrNotExist)
_, err = fakefs.Open("file")
assert.NoError(t, err)
}

View File

@@ -6,6 +6,7 @@ import (
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/fakefs"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/spf13/cobra"
@@ -17,7 +18,7 @@ func setupCompleter(t *testing.T, onlyDirs bool) *completer {
// Needed to make type context.valueCtx for mockFilerForPath
ctx = root.SetWorkspaceClient(ctx, mocks.NewMockWorkspaceClient(t).WorkspaceClient)
fakeFiler := filer.NewFakeFiler(map[string]filer.FakeFileInfo{
fakeFiler := filer.NewFakeFiler(map[string]fakefs.FileInfo{
"dir": {FakeName: "root", FakeDir: true},
"dir/dirA": {FakeDir: true},
"dir/dirB": {FakeDir: true},

View File

@@ -8,58 +8,12 @@ import (
"path"
"sort"
"strings"
"time"
"github.com/databricks/cli/libs/fakefs"
)
type FakeDirEntry struct {
FakeFileInfo
}
func (entry FakeDirEntry) Type() fs.FileMode {
typ := fs.ModePerm
if entry.FakeDir {
typ |= fs.ModeDir
}
return typ
}
func (entry FakeDirEntry) Info() (fs.FileInfo, error) {
return entry.FakeFileInfo, nil
}
type FakeFileInfo struct {
FakeName string
FakeSize int64
FakeDir bool
FakeMode fs.FileMode
}
func (info FakeFileInfo) Name() string {
return info.FakeName
}
func (info FakeFileInfo) Size() int64 {
return info.FakeSize
}
func (info FakeFileInfo) Mode() fs.FileMode {
return info.FakeMode
}
func (info FakeFileInfo) ModTime() time.Time {
return time.Now()
}
func (info FakeFileInfo) IsDir() bool {
return info.FakeDir
}
func (info FakeFileInfo) Sys() any {
return nil
}
type FakeFiler struct {
entries map[string]FakeFileInfo
entries map[string]fakefs.FileInfo
}
func (f *FakeFiler) Write(ctx context.Context, p string, reader io.Reader, mode ...WriteMode) error {
@@ -97,7 +51,7 @@ func (f *FakeFiler) ReadDir(ctx context.Context, p string) ([]fs.DirEntry, error
continue
}
out = append(out, FakeDirEntry{v})
out = append(out, fakefs.DirEntry{FileInfo: v})
}
sort.Slice(out, func(i, j int) bool { return out[i].Name() < out[j].Name() })
@@ -117,7 +71,11 @@ func (f *FakeFiler) Stat(ctx context.Context, path string) (fs.FileInfo, error)
return entry, nil
}
func NewFakeFiler(entries map[string]FakeFileInfo) *FakeFiler {
// NewFakeFiler creates a new fake [Filer] instance with the given entries.
// It sets the [Name] field of each entry to the base name of the path.
//
// This is meant to be used in tests.
func NewFakeFiler(entries map[string]fakefs.FileInfo) *FakeFiler {
fakeFiler := &FakeFiler{
entries: entries,
}

View File

@@ -0,0 +1,98 @@
package filer
import (
"context"
"io"
"io/fs"
"testing"
"github.com/databricks/cli/libs/fakefs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestFakeFiler_Read(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"file": {},
})
ctx := context.Background()
r, err := f.Read(ctx, "file")
require.NoError(t, err)
contents, err := io.ReadAll(r)
require.NoError(t, err)
// The contents of every file are "foo".
assert.Equal(t, "foo", string(contents))
}
func TestFakeFiler_Read_NotFound(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"foo": {},
})
ctx := context.Background()
_, err := f.Read(ctx, "bar")
assert.ErrorIs(t, err, fs.ErrNotExist)
}
func TestFakeFiler_ReadDir_NotFound(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"dir1": {FakeDir: true},
})
ctx := context.Background()
_, err := f.ReadDir(ctx, "dir2")
assert.ErrorIs(t, err, fs.ErrNotExist)
}
func TestFakeFiler_ReadDir_NotADirectory(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"file": {},
})
ctx := context.Background()
_, err := f.ReadDir(ctx, "file")
assert.ErrorIs(t, err, fs.ErrInvalid)
}
func TestFakeFiler_ReadDir(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"dir1": {FakeDir: true},
"dir1/file2": {},
"dir1/dir2": {FakeDir: true},
})
ctx := context.Background()
entries, err := f.ReadDir(ctx, "dir1/")
require.NoError(t, err)
require.Len(t, entries, 2)
// The entries are sorted by name.
assert.Equal(t, "dir2", entries[0].Name())
assert.True(t, entries[0].IsDir())
assert.Equal(t, "file2", entries[1].Name())
assert.False(t, entries[1].IsDir())
}
func TestFakeFiler_Stat(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"file": {},
})
ctx := context.Background()
info, err := f.Stat(ctx, "file")
require.NoError(t, err)
assert.Equal(t, "file", info.Name())
}
func TestFakeFiler_Stat_NotFound(t *testing.T) {
f := NewFakeFiler(map[string]fakefs.FileInfo{
"foo": {},
})
ctx := context.Background()
_, err := f.Stat(ctx, "bar")
assert.ErrorIs(t, err, fs.ErrNotExist)
}

View File

@@ -6,6 +6,7 @@ import (
"io/fs"
"testing"
"github.com/databricks/cli/libs/fakefs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -35,7 +36,7 @@ func TestFsDirImplementsFsReadDirFile(t *testing.T) {
}
func fakeFS() fs.FS {
fakeFiler := NewFakeFiler(map[string]FakeFileInfo{
fakeFiler := NewFakeFiler(map[string]fakefs.FileInfo{
".": {FakeName: "root", FakeDir: true},
"dirA": {FakeDir: true},
"dirB": {FakeDir: true},

View File

@ -7,6 +7,7 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/libs/fakefs"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -100,11 +101,21 @@ func TestDetectFileWithLongHeader(t *testing.T) {
assert.False(t, nb)
}
type fileInfoWithWorkspaceInfo struct {
fakefs.FileInfo
oi workspace.ObjectInfo
}
func (f fileInfoWithWorkspaceInfo) WorkspaceObjectInfo() workspace.ObjectInfo {
return f.oi
}
func TestDetectWithObjectInfo(t *testing.T) {
fakeFS := &fakeFS{
fakeFile{
fakeFileInfo{
workspace.ObjectInfo{
fakefs := fakefs.FS{
"file.py": fakefs.File{
FileInfo: fileInfoWithWorkspaceInfo{
oi: workspace.ObjectInfo{
ObjectType: workspace.ObjectTypeNotebook,
Language: workspace.LanguagePython,
},
@@ -112,7 +123,7 @@ func TestDetectWithObjectInfo(t *testing.T) {
},
}
nb, lang, err := DetectWithFS(fakeFS, "doesntmatter")
nb, lang, err := DetectWithFS(fakefs, "file.py")
require.NoError(t, err)
assert.True(t, nb)
assert.Equal(t, workspace.LanguagePython, lang)

View File

@@ -1,77 +0,0 @@
package notebook
import (
"fmt"
"io/fs"
"time"
"github.com/databricks/databricks-sdk-go/service/workspace"
)
type fakeFS struct {
fakeFile
}
type fakeFile struct {
fakeFileInfo
}
func (f fakeFile) Close() error {
return nil
}
func (f fakeFile) Read(p []byte) (n int, err error) {
return 0, fmt.Errorf("not implemented")
}
func (f fakeFile) Stat() (fs.FileInfo, error) {
return f.fakeFileInfo, nil
}
type fakeFileInfo struct {
oi workspace.ObjectInfo
}
func (f fakeFileInfo) WorkspaceObjectInfo() workspace.ObjectInfo {
return f.oi
}
func (f fakeFileInfo) Name() string {
return ""
}
func (f fakeFileInfo) Size() int64 {
return 0
}
func (f fakeFileInfo) Mode() fs.FileMode {
return 0
}
func (f fakeFileInfo) ModTime() time.Time {
return time.Time{}
}
func (f fakeFileInfo) IsDir() bool {
return false
}
func (f fakeFileInfo) Sys() any {
return nil
}
func (f fakeFS) Open(name string) (fs.File, error) {
return f.fakeFile, nil
}
func (f fakeFS) Stat(name string) (fs.FileInfo, error) {
panic("not implemented")
}
func (f fakeFS) ReadDir(name string) ([]fs.DirEntry, error) {
panic("not implemented")
}
func (f fakeFS) ReadFile(name string) ([]byte, error) {
panic("not implemented")
}