Return `diag.Diagnostics` from mutators (#1305)

## Changes

This diagnostics type allows us to capture multiple warnings as well as
errors in the return value. This prepares us to return additional
warnings from mutators in case we detect non-fatal problems.

* All return statements that previously returned an error now return
`diag.FromErr`
* All return statements that previously returned `fmt.Errorf` now return
`diag.Errorf`
* All `err != nil` checks now use `diags.HasError()` or `diags.Error()`

## Tests

* Existing tests pass.
* I confirmed no call site under `./bundle` or `./cmd/bundle` uses
`errors.Is` on the return value from mutators. This is relevant because,
unlike `fmt.Errorf`, `diag.Errorf` cannot wrap errors with `%w`
(context in https://github.com/golang/go/issues/47641).
This commit is contained in:
Pieter Noordhuis 2024-03-25 15:18:47 +01:00 committed by GitHub
parent 9cf3dbe686
commit ed194668db
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
141 changed files with 841 additions and 698 deletions

View File

@ -7,6 +7,7 @@ import (
"slices" "slices"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
) )
@ -21,7 +22,7 @@ func (m *all) Name() string {
return fmt.Sprintf("artifacts.%sAll", m.name) return fmt.Sprintf("artifacts.%sAll", m.name)
} }
func (m *all) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *all) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
var out []bundle.Mutator var out []bundle.Mutator
// Iterate with stable ordering. // Iterate with stable ordering.
@ -31,7 +32,7 @@ func (m *all) Apply(ctx context.Context, b *bundle.Bundle) error {
for _, name := range keys { for _, name := range keys {
m, err := m.fn(name) m, err := m.fn(name)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
if m != nil { if m != nil {
out = append(out, m) out = append(out, m)

View File

@ -14,6 +14,7 @@ import (
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -57,17 +58,17 @@ func (m *basicBuild) Name() string {
return fmt.Sprintf("artifacts.Build(%s)", m.name) return fmt.Sprintf("artifacts.Build(%s)", m.name)
} }
func (m *basicBuild) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *basicBuild) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
artifact, ok := b.Config.Artifacts[m.name] artifact, ok := b.Config.Artifacts[m.name]
if !ok { if !ok {
return fmt.Errorf("artifact doesn't exist: %s", m.name) return diag.Errorf("artifact doesn't exist: %s", m.name)
} }
cmdio.LogString(ctx, fmt.Sprintf("Building %s...", m.name)) cmdio.LogString(ctx, fmt.Sprintf("Building %s...", m.name))
out, err := artifact.Build(ctx) out, err := artifact.Build(ctx)
if err != nil { if err != nil {
return fmt.Errorf("build for %s failed, error: %w, output: %s", m.name, err, out) return diag.Errorf("build for %s failed, error: %v, output: %s", m.name, err, out)
} }
log.Infof(ctx, "Build succeeded") log.Infof(ctx, "Build succeeded")
@ -87,29 +88,29 @@ func (m *basicUpload) Name() string {
return fmt.Sprintf("artifacts.Upload(%s)", m.name) return fmt.Sprintf("artifacts.Upload(%s)", m.name)
} }
func (m *basicUpload) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *basicUpload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
artifact, ok := b.Config.Artifacts[m.name] artifact, ok := b.Config.Artifacts[m.name]
if !ok { if !ok {
return fmt.Errorf("artifact doesn't exist: %s", m.name) return diag.Errorf("artifact doesn't exist: %s", m.name)
} }
if len(artifact.Files) == 0 { if len(artifact.Files) == 0 {
return fmt.Errorf("artifact source is not configured: %s", m.name) return diag.Errorf("artifact source is not configured: %s", m.name)
} }
uploadPath, err := getUploadBasePath(b) uploadPath, err := getUploadBasePath(b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
client, err := filer.NewWorkspaceFilesClient(b.WorkspaceClient(), uploadPath) client, err := filer.NewWorkspaceFilesClient(b.WorkspaceClient(), uploadPath)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
err = uploadArtifact(ctx, b, artifact, uploadPath, client) err = uploadArtifact(ctx, b, artifact, uploadPath, client)
if err != nil { if err != nil {
return fmt.Errorf("upload for %s failed, error: %w", m.name, err) return diag.Errorf("upload for %s failed, error: %v", m.name, err)
} }
return nil return nil

View File

@ -5,6 +5,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/artifacts/whl" "github.com/databricks/cli/bundle/artifacts/whl"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -19,7 +20,7 @@ func (m *autodetect) Name() string {
return "artifacts.DetectPackages" return "artifacts.DetectPackages"
} }
func (m *autodetect) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *autodetect) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// If artifacts section explicitly defined, do not try to auto detect packages // If artifacts section explicitly defined, do not try to auto detect packages
if b.Config.Artifacts != nil { if b.Config.Artifacts != nil {
log.Debugf(ctx, "artifacts block is defined, skipping auto-detecting") log.Debugf(ctx, "artifacts block is defined, skipping auto-detecting")

View File

@ -6,6 +6,7 @@ import (
"path/filepath" "path/filepath"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
) )
func BuildAll() bundle.Mutator { func BuildAll() bundle.Mutator {
@ -27,10 +28,10 @@ func (m *build) Name() string {
return fmt.Sprintf("artifacts.Build(%s)", m.name) return fmt.Sprintf("artifacts.Build(%s)", m.name)
} }
func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *build) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
artifact, ok := b.Config.Artifacts[m.name] artifact, ok := b.Config.Artifacts[m.name]
if !ok { if !ok {
return fmt.Errorf("artifact doesn't exist: %s", m.name) return diag.Errorf("artifact doesn't exist: %s", m.name)
} }
// Skip building if build command is not specified or infered // Skip building if build command is not specified or infered
@ -38,7 +39,7 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error {
// If no build command was specified or infered and there is no // If no build command was specified or infered and there is no
// artifact output files specified, artifact is misconfigured // artifact output files specified, artifact is misconfigured
if len(artifact.Files) == 0 { if len(artifact.Files) == 0 {
return fmt.Errorf("misconfigured artifact: please specify 'build' or 'files' property") return diag.Errorf("misconfigured artifact: please specify 'build' or 'files' property")
} }
return nil return nil
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/artifacts/whl" "github.com/databricks/cli/bundle/artifacts/whl"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
) )
var inferMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{ var inferMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{
@ -41,10 +42,10 @@ func (m *infer) Name() string {
return fmt.Sprintf("artifacts.Infer(%s)", m.name) return fmt.Sprintf("artifacts.Infer(%s)", m.name)
} }
func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
artifact, ok := b.Config.Artifacts[m.name] artifact, ok := b.Config.Artifacts[m.name]
if !ok { if !ok {
return fmt.Errorf("artifact doesn't exist: %s", m.name) return diag.Errorf("artifact doesn't exist: %s", m.name)
} }
// only try to infer command if it's not already defined // only try to infer command if it's not already defined

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
) )
@ -33,14 +34,14 @@ func (m *upload) Name() string {
return fmt.Sprintf("artifacts.Upload(%s)", m.name) return fmt.Sprintf("artifacts.Upload(%s)", m.name)
} }
func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
artifact, ok := b.Config.Artifacts[m.name] artifact, ok := b.Config.Artifacts[m.name]
if !ok { if !ok {
return fmt.Errorf("artifact doesn't exist: %s", m.name) return diag.Errorf("artifact doesn't exist: %s", m.name)
} }
if len(artifact.Files) == 0 { if len(artifact.Files) == 0 {
return fmt.Errorf("artifact source is not configured: %s", m.name) return diag.Errorf("artifact source is not configured: %s", m.name)
} }
// Check if source paths are absolute, if not, make them absolute // Check if source paths are absolute, if not, make them absolute
@ -57,11 +58,11 @@ func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error {
for _, f := range artifact.Files { for _, f := range artifact.Files {
matches, err := filepath.Glob(f.Source) matches, err := filepath.Glob(f.Source)
if err != nil { if err != nil {
return fmt.Errorf("unable to find files for %s: %w", f.Source, err) return diag.Errorf("unable to find files for %s: %v", f.Source, err)
} }
if len(matches) == 0 { if len(matches) == 0 {
return fmt.Errorf("no files found for %s", f.Source) return diag.Errorf("no files found for %s", f.Source)
} }
for _, match := range matches { for _, match := range matches {
@ -81,10 +82,10 @@ func (m *cleanUp) Name() string {
return "artifacts.CleanUp" return "artifacts.CleanUp"
} }
func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
uploadPath, err := getUploadBasePath(b) uploadPath, err := getUploadBasePath(b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
b.WorkspaceClient().Workspace.Delete(ctx, workspace.Delete{ b.WorkspaceClient().Workspace.Delete(ctx, workspace.Delete{
@ -94,7 +95,7 @@ func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) error {
err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, uploadPath) err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, uploadPath)
if err != nil { if err != nil {
return fmt.Errorf("unable to create directory for %s: %w", uploadPath, err) return diag.Errorf("unable to create directory for %s: %v", uploadPath, err)
} }
return nil return nil

View File

@ -9,13 +9,14 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/internal/bundletest" "github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/testfile" "github.com/databricks/cli/libs/testfile"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
type noop struct{} type noop struct{}
func (n *noop) Apply(context.Context, *bundle.Bundle) error { func (n *noop) Apply(context.Context, *bundle.Bundle) diag.Diagnostics {
return nil return nil
} }
@ -57,8 +58,8 @@ func TestExpandGlobFilesSource(t *testing.T) {
return &noop{} return &noop{}
} }
err = bundle.Apply(context.Background(), b, u) diags := bundle.Apply(context.Background(), b, u)
require.NoError(t, err) require.NoError(t, diags.Error())
require.Equal(t, 2, len(b.Config.Artifacts["test"].Files)) require.Equal(t, 2, len(b.Config.Artifacts["test"].Files))
require.Equal(t, filepath.Join(rootPath, "test", "myjar1.jar"), b.Config.Artifacts["test"].Files[0].Source) require.Equal(t, filepath.Join(rootPath, "test", "myjar1.jar"), b.Config.Artifacts["test"].Files[0].Source)
@ -93,6 +94,6 @@ func TestExpandGlobFilesSourceWithNoMatches(t *testing.T) {
return &noop{} return &noop{}
} }
err = bundle.Apply(context.Background(), b, u) diags := bundle.Apply(context.Background(), b, u)
require.ErrorContains(t, err, "no files found for") require.ErrorContains(t, diags.Error(), "no files found for")
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -25,7 +26,7 @@ func (m *detectPkg) Name() string {
return "artifacts.whl.AutoDetect" return "artifacts.whl.AutoDetect"
} }
func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
wheelTasks := libraries.FindAllWheelTasksWithLocalLibraries(b) wheelTasks := libraries.FindAllWheelTasksWithLocalLibraries(b)
if len(wheelTasks) == 0 { if len(wheelTasks) == 0 {
log.Infof(ctx, "No local wheel tasks in databricks.yml config, skipping auto detect") log.Infof(ctx, "No local wheel tasks in databricks.yml config, skipping auto detect")
@ -50,7 +51,7 @@ func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error {
pkgPath, err := filepath.Abs(b.Config.Path) pkgPath, err := filepath.Abs(b.Config.Path)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
b.Config.Artifacts[module] = &config.Artifact{ b.Config.Artifacts[module] = &config.Artifact{
Path: pkgPath, Path: pkgPath,

View File

@ -9,6 +9,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/cli/libs/python" "github.com/databricks/cli/libs/python"
) )
@ -27,10 +28,10 @@ func (m *build) Name() string {
return fmt.Sprintf("artifacts.whl.Build(%s)", m.name) return fmt.Sprintf("artifacts.whl.Build(%s)", m.name)
} }
func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *build) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
artifact, ok := b.Config.Artifacts[m.name] artifact, ok := b.Config.Artifacts[m.name]
if !ok { if !ok {
return fmt.Errorf("artifact doesn't exist: %s", m.name) return diag.Errorf("artifact doesn't exist: %s", m.name)
} }
cmdio.LogString(ctx, fmt.Sprintf("Building %s...", m.name)) cmdio.LogString(ctx, fmt.Sprintf("Building %s...", m.name))
@ -43,13 +44,13 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error {
out, err := artifact.Build(ctx) out, err := artifact.Build(ctx)
if err != nil { if err != nil {
return fmt.Errorf("build failed %s, error: %w, output: %s", m.name, err, out) return diag.Errorf("build failed %s, error: %v, output: %s", m.name, err, out)
} }
log.Infof(ctx, "Build succeeded") log.Infof(ctx, "Build succeeded")
wheels := python.FindFilesWithSuffixInPath(distPath, ".whl") wheels := python.FindFilesWithSuffixInPath(distPath, ".whl")
if len(wheels) == 0 { if len(wheels) == 0 {
return fmt.Errorf("cannot find built wheel in %s for package %s", dir, m.name) return diag.Errorf("cannot find built wheel in %s for package %s", dir, m.name)
} }
for _, wheel := range wheels { for _, wheel := range wheels {
artifact.Files = append(artifact.Files, config.ArtifactFile{ artifact.Files = append(artifact.Files, config.ArtifactFile{

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -20,7 +21,7 @@ func (m *fromLibraries) Name() string {
return "artifacts.whl.DefineArtifactsFromLibraries" return "artifacts.whl.DefineArtifactsFromLibraries"
} }
func (*fromLibraries) Apply(ctx context.Context, b *bundle.Bundle) error { func (*fromLibraries) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if len(b.Config.Artifacts) != 0 { if len(b.Config.Artifacts) != 0 {
log.Debugf(ctx, "Skipping defining artifacts from libraries because artifacts section is explicitly defined") log.Debugf(ctx, "Skipping defining artifacts from libraries because artifacts section is explicitly defined")
return nil return nil

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/python" "github.com/databricks/cli/libs/python"
) )
@ -12,11 +13,11 @@ type infer struct {
name string name string
} }
func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
artifact := b.Config.Artifacts[m.name] artifact := b.Config.Artifacts[m.name]
py, err := python.DetectExecutable(ctx) py, err := python.DetectExecutable(ctx)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Note: using --build-number (build tag) flag does not help with re-installing // Note: using --build-number (build tag) flag does not help with re-installing

View File

@ -6,6 +6,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
) )
type defineDefaultTarget struct { type defineDefaultTarget struct {
@ -24,7 +25,7 @@ func (m *defineDefaultTarget) Name() string {
return fmt.Sprintf("DefineDefaultTarget(%s)", m.name) return fmt.Sprintf("DefineDefaultTarget(%s)", m.name)
} }
func (m *defineDefaultTarget) Apply(_ context.Context, b *bundle.Bundle) error { func (m *defineDefaultTarget) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
// Nothing to do if the configuration has at least 1 target. // Nothing to do if the configuration has at least 1 target.
if len(b.Config.Targets) > 0 { if len(b.Config.Targets) > 0 {
return nil return nil

View File

@ -13,8 +13,9 @@ import (
func TestDefaultTarget(t *testing.T) { func TestDefaultTarget(t *testing.T) {
b := &bundle.Bundle{} b := &bundle.Bundle{}
err := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget()) diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget())
require.NoError(t, err) require.NoError(t, diags.Error())
env, ok := b.Config.Targets["default"] env, ok := b.Config.Targets["default"]
assert.True(t, ok) assert.True(t, ok)
assert.Equal(t, &config.Target{}, env) assert.Equal(t, &config.Target{}, env)
@ -28,8 +29,9 @@ func TestDefaultTargetAlreadySpecified(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget()) diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget())
require.NoError(t, err) require.NoError(t, diags.Error())
_, ok := b.Config.Targets["default"] _, ok := b.Config.Targets["default"]
assert.False(t, ok) assert.False(t, ok)
} }

View File

@ -2,10 +2,10 @@ package mutator
import ( import (
"context" "context"
"fmt"
"path" "path"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
) )
type defineDefaultWorkspacePaths struct{} type defineDefaultWorkspacePaths struct{}
@ -19,10 +19,10 @@ func (m *defineDefaultWorkspacePaths) Name() string {
return "DefaultWorkspacePaths" return "DefaultWorkspacePaths"
} }
func (m *defineDefaultWorkspacePaths) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *defineDefaultWorkspacePaths) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
root := b.Config.Workspace.RootPath root := b.Config.Workspace.RootPath
if root == "" { if root == "" {
return fmt.Errorf("unable to define default workspace paths: workspace root not defined") return diag.Errorf("unable to define default workspace paths: workspace root not defined")
} }
if b.Config.Workspace.FilePath == "" { if b.Config.Workspace.FilePath == "" {

View File

@ -19,8 +19,8 @@ func TestDefineDefaultWorkspacePaths(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths()) diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "/files", b.Config.Workspace.FilePath) assert.Equal(t, "/files", b.Config.Workspace.FilePath)
assert.Equal(t, "/artifacts", b.Config.Workspace.ArtifactPath) assert.Equal(t, "/artifacts", b.Config.Workspace.ArtifactPath)
assert.Equal(t, "/state", b.Config.Workspace.StatePath) assert.Equal(t, "/state", b.Config.Workspace.StatePath)
@ -37,8 +37,8 @@ func TestDefineDefaultWorkspacePathsAlreadySet(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths()) diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "/foo/bar", b.Config.Workspace.FilePath) assert.Equal(t, "/foo/bar", b.Config.Workspace.FilePath)
assert.Equal(t, "/foo/bar", b.Config.Workspace.ArtifactPath) assert.Equal(t, "/foo/bar", b.Config.Workspace.ArtifactPath)
assert.Equal(t, "/foo/bar", b.Config.Workspace.StatePath) assert.Equal(t, "/foo/bar", b.Config.Workspace.StatePath)

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
) )
type defineDefaultWorkspaceRoot struct{} type defineDefaultWorkspaceRoot struct{}
@ -18,17 +19,17 @@ func (m *defineDefaultWorkspaceRoot) Name() string {
return "DefineDefaultWorkspaceRoot" return "DefineDefaultWorkspaceRoot"
} }
func (m *defineDefaultWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *defineDefaultWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if b.Config.Workspace.RootPath != "" { if b.Config.Workspace.RootPath != "" {
return nil return nil
} }
if b.Config.Bundle.Name == "" { if b.Config.Bundle.Name == "" {
return fmt.Errorf("unable to define default workspace root: bundle name not defined") return diag.Errorf("unable to define default workspace root: bundle name not defined")
} }
if b.Config.Bundle.Target == "" { if b.Config.Bundle.Target == "" {
return fmt.Errorf("unable to define default workspace root: bundle target not selected") return diag.Errorf("unable to define default workspace root: bundle target not selected")
} }
b.Config.Workspace.RootPath = fmt.Sprintf( b.Config.Workspace.RootPath = fmt.Sprintf(

View File

@ -20,7 +20,8 @@ func TestDefaultWorkspaceRoot(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspaceRoot()) diags := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspaceRoot())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "~/.bundle/name/environment", b.Config.Workspace.RootPath) assert.Equal(t, "~/.bundle/name/environment", b.Config.Workspace.RootPath)
} }

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
) )
@ -18,7 +19,7 @@ func (m *environmentsToTargets) Name() string {
return "EnvironmentsToTargets" return "EnvironmentsToTargets"
} }
func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// Short circuit if the "environments" key is not set. // Short circuit if the "environments" key is not set.
// This is the common case. // This is the common case.
if b.Config.Environments == nil { if b.Config.Environments == nil {
@ -26,7 +27,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err
} }
// The "environments" key is set; validate and rewrite it to "targets". // The "environments" key is set; validate and rewrite it to "targets".
return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
environments := v.Get("environments") environments := v.Get("environments")
targets := v.Get("targets") targets := v.Get("targets")
@ -60,4 +61,6 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err
return v, nil return v, nil
}) })
return diag.FromErr(err)
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
) )
func TestEnvironmentsToTargetsWithBothDefined(t *testing.T) { func TestEnvironmentsToTargetsWithBothDefined(t *testing.T) {
@ -26,8 +27,8 @@ func TestEnvironmentsToTargetsWithBothDefined(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) diags := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets())
assert.ErrorContains(t, err, `both 'environments' and 'targets' are specified;`) assert.ErrorContains(t, diags.Error(), `both 'environments' and 'targets' are specified;`)
} }
func TestEnvironmentsToTargetsWithEnvironmentsDefined(t *testing.T) { func TestEnvironmentsToTargetsWithEnvironmentsDefined(t *testing.T) {
@ -41,8 +42,8 @@ func TestEnvironmentsToTargetsWithEnvironmentsDefined(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) diags := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets())
assert.NoError(t, err) require.NoError(t, diags.Error())
assert.Len(t, b.Config.Environments, 0) assert.Len(t, b.Config.Environments, 0)
assert.Len(t, b.Config.Targets, 1) assert.Len(t, b.Config.Targets, 1)
} }
@ -58,8 +59,8 @@ func TestEnvironmentsToTargetsWithTargetsDefined(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) diags := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets())
assert.NoError(t, err) require.NoError(t, diags.Error())
assert.Len(t, b.Config.Environments, 0) assert.Len(t, b.Config.Environments, 0)
assert.Len(t, b.Config.Targets, 1) assert.Len(t, b.Config.Targets, 1)
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
) )
@ -92,8 +93,8 @@ func (m *expandPipelineGlobPaths) expandSequence(p dyn.Path, v dyn.Value) (dyn.V
return dyn.NewValue(vs, v.Location()), nil return dyn.NewValue(vs, v.Location()), nil
} }
func (m *expandPipelineGlobPaths) Apply(_ context.Context, b *bundle.Bundle) error { func (m *expandPipelineGlobPaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
p := dyn.NewPattern( p := dyn.NewPattern(
dyn.Key("resources"), dyn.Key("resources"),
dyn.Key("pipelines"), dyn.Key("pipelines"),
@ -104,6 +105,8 @@ func (m *expandPipelineGlobPaths) Apply(_ context.Context, b *bundle.Bundle) err
// Visit each pipeline's "libraries" field and expand any glob patterns. // Visit each pipeline's "libraries" field and expand any glob patterns.
return dyn.MapByPattern(v, p, m.expandSequence) return dyn.MapByPattern(v, p, m.expandSequence)
}) })
return diag.FromErr(err)
} }
func (*expandPipelineGlobPaths) Name() string { func (*expandPipelineGlobPaths) Name() string {

View File

@ -109,8 +109,8 @@ func TestExpandGlobPathsInPipelines(t *testing.T) {
bundletest.SetLocation(b, "resources.pipelines.pipeline.libraries[3]", filepath.Join(dir, "relative", "resource.yml")) bundletest.SetLocation(b, "resources.pipelines.pipeline.libraries[3]", filepath.Join(dir, "relative", "resource.yml"))
m := ExpandPipelineGlobPaths() m := ExpandPipelineGlobPaths()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, err) require.NoError(t, diags.Error())
libraries := b.Config.Resources.Pipelines["pipeline"].Libraries libraries := b.Config.Resources.Pipelines["pipeline"].Libraries
require.Len(t, libraries, 13) require.Len(t, libraries, 13)

View File

@ -7,6 +7,7 @@ import (
"strings" "strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
) )
type expandWorkspaceRoot struct{} type expandWorkspaceRoot struct{}
@ -20,15 +21,15 @@ func (m *expandWorkspaceRoot) Name() string {
return "ExpandWorkspaceRoot" return "ExpandWorkspaceRoot"
} }
func (m *expandWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *expandWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
root := b.Config.Workspace.RootPath root := b.Config.Workspace.RootPath
if root == "" { if root == "" {
return fmt.Errorf("unable to expand workspace root: workspace root not defined") return diag.Errorf("unable to expand workspace root: workspace root not defined")
} }
currentUser := b.Config.Workspace.CurrentUser currentUser := b.Config.Workspace.CurrentUser
if currentUser == nil || currentUser.UserName == "" { if currentUser == nil || currentUser.UserName == "" {
return fmt.Errorf("unable to expand workspace root: current user not set") return diag.Errorf("unable to expand workspace root: current user not set")
} }
if strings.HasPrefix(root, "~/") { if strings.HasPrefix(root, "~/") {

View File

@ -25,8 +25,8 @@ func TestExpandWorkspaceRoot(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) diags := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "/Users/jane@doe.com/foo", b.Config.Workspace.RootPath) assert.Equal(t, "/Users/jane@doe.com/foo", b.Config.Workspace.RootPath)
} }
@ -43,8 +43,8 @@ func TestExpandWorkspaceRootDoesNothing(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) diags := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "/Users/charly@doe.com/foo", b.Config.Workspace.RootPath) assert.Equal(t, "/Users/charly@doe.com/foo", b.Config.Workspace.RootPath)
} }
@ -60,8 +60,8 @@ func TestExpandWorkspaceRootWithoutRoot(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) diags := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot())
require.Error(t, err) require.True(t, diags.HasError())
} }
func TestExpandWorkspaceRootWithoutCurrentUser(t *testing.T) { func TestExpandWorkspaceRootWithoutCurrentUser(t *testing.T) {
@ -72,6 +72,6 @@ func TestExpandWorkspaceRootWithoutCurrentUser(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) diags := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot())
require.Error(t, err) require.True(t, diags.HasError())
} }

View File

@ -4,6 +4,7 @@ import (
"context" "context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
) )
type ifMutator struct { type ifMutator struct {
@ -22,7 +23,7 @@ func If(
} }
} }
func (m *ifMutator) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *ifMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if m.condition(b) { if m.condition(b) {
return bundle.Apply(ctx, b, m.onTrueMutator) return bundle.Apply(ctx, b, m.onTrueMutator)
} else { } else {

View File

@ -5,6 +5,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/cli/libs/diag"
) )
type initializeVariables struct{} type initializeVariables struct{}
@ -18,7 +19,7 @@ func (m *initializeVariables) Name() string {
return "InitializeVariables" return "InitializeVariables"
} }
func (m *initializeVariables) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *initializeVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
vars := b.Config.Variables vars := b.Config.Variables
for k, v := range vars { for k, v := range vars {
if v == nil { if v == nil {

View File

@ -23,8 +23,8 @@ func TestInitializeVariables(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.InitializeVariables()) diags := bundle.Apply(context.Background(), b, mutator.InitializeVariables())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.NotNil(t, b.Config.Variables["foo"]) assert.NotNil(t, b.Config.Variables["foo"])
assert.NotNil(t, b.Config.Variables["bar"]) assert.NotNil(t, b.Config.Variables["bar"])
assert.Equal(t, "This is a description", b.Config.Variables["bar"].Description) assert.Equal(t, "This is a description", b.Config.Variables["bar"].Description)
@ -36,7 +36,7 @@ func TestInitializeVariablesWithoutVariables(t *testing.T) {
Variables: nil, Variables: nil,
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.InitializeVariables()) diags := bundle.Apply(context.Background(), b, mutator.InitializeVariables())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Nil(t, b.Config.Variables) assert.Nil(t, b.Config.Variables)
} }

View File

@ -4,6 +4,7 @@ import (
"context" "context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
) )
type initializeWorkspaceClient struct{} type initializeWorkspaceClient struct{}
@ -19,7 +20,7 @@ func (m *initializeWorkspaceClient) Name() string {
// Apply initializes the workspace client for the bundle. We do this here so // Apply initializes the workspace client for the bundle. We do this here so
// downstream calls to b.WorkspaceClient() do not panic if there's an error in the // downstream calls to b.WorkspaceClient() do not panic if there's an error in the
// auth configuration. // auth configuration.
func (m *initializeWorkspaceClient) Apply(_ context.Context, b *bundle.Bundle) error { func (m *initializeWorkspaceClient) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
_, err := b.InitializeWorkspaceClient() _, err := b.InitializeWorkspaceClient()
return err return diag.FromErr(err)
} }

View File

@ -5,6 +5,7 @@ import (
"path/filepath" "path/filepath"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/git"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -19,11 +20,11 @@ func (m *loadGitDetails) Name() string {
return "LoadGitDetails" return "LoadGitDetails"
} }
func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// Load relevant git repository // Load relevant git repository
repo, err := git.NewRepository(b.Config.Path) repo, err := git.NewRepository(b.Config.Path)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Read branch name of current checkout // Read branch name of current checkout
@ -57,12 +58,12 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error {
// Compute relative path of the bundle root from the Git repo root. // Compute relative path of the bundle root from the Git repo root.
absBundlePath, err := filepath.Abs(b.Config.Path) absBundlePath, err := filepath.Abs(b.Config.Path)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// repo.Root() returns the absolute path of the repo // repo.Root() returns the absolute path of the repo
relBundlePath, err := filepath.Rel(repo.Root(), absBundlePath) relBundlePath, err := filepath.Rel(repo.Root(), absBundlePath)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
b.Config.Bundle.Git.BundleRootPath = filepath.ToSlash(relBundlePath) b.Config.Bundle.Git.BundleRootPath = filepath.ToSlash(relBundlePath)
return nil return nil

View File

@ -4,6 +4,7 @@ import (
"context" "context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge" "github.com/databricks/cli/libs/dyn/merge"
) )
@ -29,8 +30,8 @@ func (m *mergeJobClusters) jobClusterKey(v dyn.Value) string {
} }
} }
func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
if v == dyn.NilValue { if v == dyn.NilValue {
return v, nil return v, nil
} }
@ -39,4 +40,6 @@ func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error {
return dyn.Map(job, "job_clusters", merge.ElementsByKey("job_cluster_key", m.jobClusterKey)) return dyn.Map(job, "job_clusters", merge.ElementsByKey("job_cluster_key", m.jobClusterKey))
})) }))
}) })
return diag.FromErr(err)
} }

View File

@ -50,8 +50,8 @@ func TestMergeJobClusters(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.MergeJobClusters()) diags := bundle.Apply(context.Background(), b, mutator.MergeJobClusters())
assert.NoError(t, err) assert.NoError(t, diags.Error())
j := b.Config.Resources.Jobs["foo"] j := b.Config.Resources.Jobs["foo"]
@ -99,7 +99,7 @@ func TestMergeJobClustersWithNilKey(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.MergeJobClusters()) diags := bundle.Apply(context.Background(), b, mutator.MergeJobClusters())
assert.NoError(t, err) assert.NoError(t, diags.Error())
assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1)
} }

View File

@ -4,6 +4,7 @@ import (
"context" "context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge" "github.com/databricks/cli/libs/dyn/merge"
) )
@ -29,8 +30,8 @@ func (m *mergeJobTasks) taskKeyString(v dyn.Value) string {
} }
} }
func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
if v == dyn.NilValue { if v == dyn.NilValue {
return v, nil return v, nil
} }
@ -39,4 +40,6 @@ func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error {
return dyn.Map(job, "tasks", merge.ElementsByKey("task_key", m.taskKeyString)) return dyn.Map(job, "tasks", merge.ElementsByKey("task_key", m.taskKeyString))
})) }))
}) })
return diag.FromErr(err)
} }

View File

@ -58,8 +58,8 @@ func TestMergeJobTasks(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.MergeJobTasks()) diags := bundle.Apply(context.Background(), b, mutator.MergeJobTasks())
assert.NoError(t, err) assert.NoError(t, diags.Error())
j := b.Config.Resources.Jobs["foo"] j := b.Config.Resources.Jobs["foo"]
@ -111,7 +111,7 @@ func TestMergeJobTasksWithNilKey(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.MergeJobTasks()) diags := bundle.Apply(context.Background(), b, mutator.MergeJobTasks())
assert.NoError(t, err) assert.NoError(t, diags.Error())
assert.Len(t, b.Config.Resources.Jobs["foo"].Tasks, 1) assert.Len(t, b.Config.Resources.Jobs["foo"].Tasks, 1)
} }

View File

@ -5,6 +5,7 @@ import (
"strings" "strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge" "github.com/databricks/cli/libs/dyn/merge"
) )
@ -32,8 +33,8 @@ func (m *mergePipelineClusters) clusterLabel(v dyn.Value) string {
} }
} }
func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
if v == dyn.NilValue { if v == dyn.NilValue {
return v, nil return v, nil
} }
@ -42,4 +43,6 @@ func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) err
return dyn.Map(pipeline, "clusters", merge.ElementsByKey("label", m.clusterLabel)) return dyn.Map(pipeline, "clusters", merge.ElementsByKey("label", m.clusterLabel))
})) }))
}) })
return diag.FromErr(err)
} }

View File

@ -42,8 +42,8 @@ func TestMergePipelineClusters(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) diags := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters())
assert.NoError(t, err) assert.NoError(t, diags.Error())
p := b.Config.Resources.Pipelines["foo"] p := b.Config.Resources.Pipelines["foo"]
@ -86,8 +86,8 @@ func TestMergePipelineClustersCaseInsensitive(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) diags := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters())
assert.NoError(t, err) assert.NoError(t, diags.Error())
p := b.Config.Resources.Pipelines["foo"] p := b.Config.Resources.Pipelines["foo"]
assert.Len(t, p.Clusters, 1) assert.Len(t, p.Clusters, 1)
@ -107,8 +107,8 @@ func TestMergePipelineClustersNilPipelines(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) diags := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters())
assert.NoError(t, err) assert.NoError(t, diags.Error())
} }
func TestMergePipelineClustersEmptyPipelines(t *testing.T) { func TestMergePipelineClustersEmptyPipelines(t *testing.T) {
@ -120,6 +120,6 @@ func TestMergePipelineClustersEmptyPipelines(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) diags := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters())
assert.NoError(t, err) assert.NoError(t, diags.Error())
} }

View File

@ -4,11 +4,12 @@ import (
"context" "context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
) )
type noop struct{} type noop struct{}
func (*noop) Apply(context.Context, *bundle.Bundle) error { func (*noop) Apply(context.Context, *bundle.Bundle) diag.Diagnostics {
return nil return nil
} }

View File

@ -2,11 +2,11 @@ package mutator
import ( import (
"context" "context"
"fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
) )
@ -32,10 +32,10 @@ func overrideJobCompute(j *resources.Job, compute string) {
} }
} }
func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if b.Config.Bundle.Mode != config.Development { if b.Config.Bundle.Mode != config.Development {
if b.Config.Bundle.ComputeID != "" { if b.Config.Bundle.ComputeID != "" {
return fmt.Errorf("cannot override compute for an target that does not use 'mode: development'") return diag.Errorf("cannot override compute for an target that does not use 'mode: development'")
} }
return nil return nil
} }

View File

@ -49,8 +49,8 @@ func TestOverrideDevelopment(t *testing.T) {
} }
m := mutator.OverrideCompute() m := mutator.OverrideCompute()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Nil(t, b.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) assert.Nil(t, b.Config.Resources.Jobs["job1"].Tasks[0].NewCluster)
assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId)
assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId)
@ -85,8 +85,8 @@ func TestOverrideDevelopmentEnv(t *testing.T) {
} }
m := mutator.OverrideCompute() m := mutator.OverrideCompute()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "cluster2", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) assert.Equal(t, "cluster2", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId)
} }
@ -110,8 +110,8 @@ func TestOverridePipelineTask(t *testing.T) {
} }
m := mutator.OverrideCompute() m := mutator.OverrideCompute()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId)
} }
@ -140,8 +140,8 @@ func TestOverrideProduction(t *testing.T) {
} }
m := mutator.OverrideCompute() m := mutator.OverrideCompute()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.Error(t, err) require.True(t, diags.HasError())
} }
func TestOverrideProductionEnv(t *testing.T) { func TestOverrideProductionEnv(t *testing.T) {
@ -167,6 +167,6 @@ func TestOverrideProductionEnv(t *testing.T) {
} }
m := mutator.OverrideCompute() m := mutator.OverrideCompute()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, err) require.NoError(t, diags.Error())
} }

View File

@ -6,6 +6,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/auth" "github.com/databricks/cli/libs/auth"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/tags" "github.com/databricks/cli/libs/tags"
) )
@ -20,7 +21,7 @@ func (m *populateCurrentUser) Name() string {
return "PopulateCurrentUser" return "PopulateCurrentUser"
} }
func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if b.Config.Workspace.CurrentUser != nil { if b.Config.Workspace.CurrentUser != nil {
return nil return nil
} }
@ -28,7 +29,7 @@ func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error
w := b.WorkspaceClient() w := b.WorkspaceClient()
me, err := w.CurrentUser.Me(ctx) me, err := w.CurrentUser.Me(ctx)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
b.Config.Workspace.CurrentUser = &config.User{ b.Config.Workspace.CurrentUser = &config.User{

View File

@ -6,6 +6,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
) )
type processInclude struct { type processInclude struct {
@ -25,10 +26,12 @@ func (m *processInclude) Name() string {
return fmt.Sprintf("ProcessInclude(%s)", m.relPath) return fmt.Sprintf("ProcessInclude(%s)", m.relPath)
} }
func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) error { func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
this, err := config.Load(m.fullPath) this, err := config.Load(m.fullPath)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return b.Config.Merge(this) // TODO: Return actual warnings.
err = b.Config.Merge(this)
return diag.FromErr(err)
} }

View File

@ -32,7 +32,7 @@ func TestProcessInclude(t *testing.T) {
f.Close() f.Close()
assert.Equal(t, "foo", b.Config.Workspace.Host) assert.Equal(t, "foo", b.Config.Workspace.Host)
err = bundle.Apply(context.Background(), b, mutator.ProcessInclude(fullPath, relPath)) diags := bundle.Apply(context.Background(), b, mutator.ProcessInclude(fullPath, relPath))
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "bar", b.Config.Workspace.Host) assert.Equal(t, "bar", b.Config.Workspace.Host)
} }

View File

@ -2,7 +2,6 @@ package mutator
import ( import (
"context" "context"
"fmt"
"os" "os"
"path/filepath" "path/filepath"
"slices" "slices"
@ -11,6 +10,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/env" "github.com/databricks/cli/bundle/env"
"github.com/databricks/cli/libs/diag"
) )
// Get extra include paths from environment variable // Get extra include paths from environment variable
@ -34,7 +34,7 @@ func (m *processRootIncludes) Name() string {
return "ProcessRootIncludes" return "ProcessRootIncludes"
} }
func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
var out []bundle.Mutator var out []bundle.Mutator
// Map with files we've already seen to avoid loading them twice. // Map with files we've already seen to avoid loading them twice.
@ -53,7 +53,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error
if filepath.IsAbs(extraIncludePath) { if filepath.IsAbs(extraIncludePath) {
rel, err := filepath.Rel(b.Config.Path, extraIncludePath) rel, err := filepath.Rel(b.Config.Path, extraIncludePath)
if err != nil { if err != nil {
return fmt.Errorf("unable to include file '%s': %w", extraIncludePath, err) return diag.Errorf("unable to include file '%s': %v", extraIncludePath, err)
} }
extraIncludePath = rel extraIncludePath = rel
} }
@ -66,19 +66,19 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error
for _, entry := range b.Config.Include { for _, entry := range b.Config.Include {
// Include paths must be relative. // Include paths must be relative.
if filepath.IsAbs(entry) { if filepath.IsAbs(entry) {
return fmt.Errorf("%s: includes must be relative paths", entry) return diag.Errorf("%s: includes must be relative paths", entry)
} }
// Anchor includes to the bundle root path. // Anchor includes to the bundle root path.
matches, err := filepath.Glob(filepath.Join(b.Config.Path, entry)) matches, err := filepath.Glob(filepath.Join(b.Config.Path, entry))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// If the entry is not a glob pattern and no matches found, // If the entry is not a glob pattern and no matches found,
// return an error because the file defined is not found // return an error because the file defined is not found
if len(matches) == 0 && !strings.ContainsAny(entry, "*?[") { if len(matches) == 0 && !strings.ContainsAny(entry, "*?[") {
return fmt.Errorf("%s defined in 'include' section does not match any files", entry) return diag.Errorf("%s defined in 'include' section does not match any files", entry)
} }
// Filter matches to ones we haven't seen yet. // Filter matches to ones we haven't seen yet.
@ -86,7 +86,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error
for _, match := range matches { for _, match := range matches {
rel, err := filepath.Rel(b.Config.Path, match) rel, err := filepath.Rel(b.Config.Path, match)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
if _, ok := seen[rel]; ok { if _, ok := seen[rel]; ok {
continue continue

View File

@ -23,8 +23,8 @@ func TestProcessRootIncludesEmpty(t *testing.T) {
Path: ".", Path: ".",
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes())
require.NoError(t, err) require.NoError(t, diags.Error())
} }
func TestProcessRootIncludesAbs(t *testing.T) { func TestProcessRootIncludesAbs(t *testing.T) {
@ -43,9 +43,9 @@ func TestProcessRootIncludesAbs(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes())
require.Error(t, err) require.True(t, diags.HasError())
assert.Contains(t, err.Error(), "must be relative paths") assert.ErrorContains(t, diags.Error(), "must be relative paths")
} }
func TestProcessRootIncludesSingleGlob(t *testing.T) { func TestProcessRootIncludesSingleGlob(t *testing.T) {
@ -62,9 +62,8 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) {
testutil.Touch(t, b.Config.Path, "a.yml") testutil.Touch(t, b.Config.Path, "a.yml")
testutil.Touch(t, b.Config.Path, "b.yml") testutil.Touch(t, b.Config.Path, "b.yml")
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, []string{"a.yml", "b.yml"}, b.Config.Include) assert.Equal(t, []string{"a.yml", "b.yml"}, b.Config.Include)
} }
@ -82,9 +81,8 @@ func TestProcessRootIncludesMultiGlob(t *testing.T) {
testutil.Touch(t, b.Config.Path, "a1.yml") testutil.Touch(t, b.Config.Path, "a1.yml")
testutil.Touch(t, b.Config.Path, "b1.yml") testutil.Touch(t, b.Config.Path, "b1.yml")
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, []string{"a1.yml", "b1.yml"}, b.Config.Include) assert.Equal(t, []string{"a1.yml", "b1.yml"}, b.Config.Include)
} }
@ -101,8 +99,8 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) {
testutil.Touch(t, b.Config.Path, "a.yml") testutil.Touch(t, b.Config.Path, "a.yml")
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, []string{"a.yml"}, b.Config.Include) assert.Equal(t, []string{"a.yml"}, b.Config.Include)
} }
@ -115,9 +113,9 @@ func TestProcessRootIncludesNotExists(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes())
require.Error(t, err) require.True(t, diags.HasError())
assert.Contains(t, err.Error(), "notexist.yml defined in 'include' section does not match any files") assert.ErrorContains(t, diags.Error(), "notexist.yml defined in 'include' section does not match any files")
} }
func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) {
@ -132,8 +130,8 @@ func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Contains(t, b.Config.Include, testYamlName) assert.Contains(t, b.Config.Include, testYamlName)
} }
@ -155,7 +153,7 @@ func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) diags := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, []string{testYamlName}, b.Config.Include) assert.Equal(t, []string{testYamlName}, b.Config.Include)
} }

View File

@ -2,13 +2,13 @@ package mutator
import ( import (
"context" "context"
"fmt"
"path" "path"
"strings" "strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/auth" "github.com/databricks/cli/libs/auth"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/ml" "github.com/databricks/databricks-sdk-go/service/ml"
@ -29,7 +29,7 @@ func (m *processTargetMode) Name() string {
// Mark all resources as being for 'development' purposes, i.e. // Mark all resources as being for 'development' purposes, i.e.
// changing their their name, adding tags, and (in the future) // changing their their name, adding tags, and (in the future)
// marking them as 'hidden' in the UI. // marking them as 'hidden' in the UI.
func transformDevelopmentMode(b *bundle.Bundle) error { func transformDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {
r := b.Config.Resources r := b.Config.Resources
shortName := b.Config.Workspace.CurrentUser.ShortName shortName := b.Config.Workspace.CurrentUser.ShortName
@ -100,9 +100,9 @@ func transformDevelopmentMode(b *bundle.Bundle) error {
return nil return nil
} }
func validateDevelopmentMode(b *bundle.Bundle) error { func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {
if path := findNonUserPath(b); path != "" { if path := findNonUserPath(b); path != "" {
return fmt.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path) return diag.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path)
} }
return nil return nil
} }
@ -125,7 +125,7 @@ func findNonUserPath(b *bundle.Bundle) string {
return "" return ""
} }
func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) error { func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) diag.Diagnostics {
if b.Config.Bundle.Git.Inferred { if b.Config.Bundle.Git.Inferred {
env := b.Config.Bundle.Target env := b.Config.Bundle.Target
log.Warnf(ctx, "target with 'mode: production' should specify an explicit 'targets.%s.git' configuration", env) log.Warnf(ctx, "target with 'mode: production' should specify an explicit 'targets.%s.git' configuration", env)
@ -134,12 +134,12 @@ func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUs
r := b.Config.Resources r := b.Config.Resources
for i := range r.Pipelines { for i := range r.Pipelines {
if r.Pipelines[i].Development { if r.Pipelines[i].Development {
return fmt.Errorf("target with 'mode: production' cannot include a pipeline with 'development: true'") return diag.Errorf("target with 'mode: production' cannot include a pipeline with 'development: true'")
} }
} }
if !isPrincipalUsed && !isRunAsSet(r) { if !isPrincipalUsed && !isRunAsSet(r) {
return fmt.Errorf("'run_as' must be set for all jobs when using 'mode: production'") return diag.Errorf("'run_as' must be set for all jobs when using 'mode: production'")
} }
return nil return nil
} }
@ -156,12 +156,12 @@ func isRunAsSet(r config.Resources) bool {
return true return true
} }
func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
switch b.Config.Bundle.Mode { switch b.Config.Bundle.Mode {
case config.Development: case config.Development:
err := validateDevelopmentMode(b) diags := validateDevelopmentMode(b)
if err != nil { if diags != nil {
return err return diags
} }
return transformDevelopmentMode(b) return transformDevelopmentMode(b)
case config.Production: case config.Production:
@ -170,7 +170,7 @@ func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error {
case "": case "":
// No action // No action
default: default:
return fmt.Errorf("unsupported value '%s' specified for 'mode': must be either 'development' or 'production'", b.Config.Bundle.Mode) return diag.Errorf("unsupported value '%s' specified for 'mode': must be either 'development' or 'production'", b.Config.Bundle.Mode)
} }
return nil return nil

View File

@ -110,8 +110,8 @@ func TestProcessTargetModeDevelopment(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
m := ProcessTargetMode() m := ProcessTargetMode()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, err) require.NoError(t, diags.Error())
// Job 1 // Job 1
assert.Equal(t, "[dev lennart] job1", b.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "[dev lennart] job1", b.Config.Resources.Jobs["job1"].Name)
@ -154,8 +154,8 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) {
}) })
b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
err := bundle.Apply(context.Background(), b, ProcessTargetMode()) diags := bundle.Apply(context.Background(), b, ProcessTargetMode())
require.NoError(t, err) require.NoError(t, diags.Error())
// Assert that tag normalization took place. // Assert that tag normalization took place.
assert.Equal(t, "Hello world__", b.Config.Resources.Jobs["job1"].Tags["dev"]) assert.Equal(t, "Hello world__", b.Config.Resources.Jobs["job1"].Tags["dev"])
@ -168,8 +168,8 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAzure(t *testing.T) {
}) })
b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
err := bundle.Apply(context.Background(), b, ProcessTargetMode()) diags := bundle.Apply(context.Background(), b, ProcessTargetMode())
require.NoError(t, err) require.NoError(t, diags.Error())
// Assert that tag normalization took place (Azure allows more characters than AWS). // Assert that tag normalization took place (Azure allows more characters than AWS).
assert.Equal(t, "Héllö wörld?!", b.Config.Resources.Jobs["job1"].Tags["dev"]) assert.Equal(t, "Héllö wörld?!", b.Config.Resources.Jobs["job1"].Tags["dev"])
@ -182,8 +182,8 @@ func TestProcessTargetModeDevelopmentTagNormalizationForGcp(t *testing.T) {
}) })
b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
err := bundle.Apply(context.Background(), b, ProcessTargetMode()) diags := bundle.Apply(context.Background(), b, ProcessTargetMode())
require.NoError(t, err) require.NoError(t, diags.Error())
// Assert that tag normalization took place. // Assert that tag normalization took place.
assert.Equal(t, "Hello_world", b.Config.Resources.Jobs["job1"].Tags["dev"]) assert.Equal(t, "Hello_world", b.Config.Resources.Jobs["job1"].Tags["dev"])
@ -193,8 +193,8 @@ func TestProcessTargetModeDefault(t *testing.T) {
b := mockBundle("") b := mockBundle("")
m := ProcessTargetMode() m := ProcessTargetMode()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name)
assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
@ -205,15 +205,15 @@ func TestProcessTargetModeDefault(t *testing.T) {
func TestProcessTargetModeProduction(t *testing.T) { func TestProcessTargetModeProduction(t *testing.T) {
b := mockBundle(config.Production) b := mockBundle(config.Production)
err := validateProductionMode(context.Background(), b, false) diags := validateProductionMode(context.Background(), b, false)
require.ErrorContains(t, err, "run_as") require.ErrorContains(t, diags.Error(), "run_as")
b.Config.Workspace.StatePath = "/Shared/.bundle/x/y/state" b.Config.Workspace.StatePath = "/Shared/.bundle/x/y/state"
b.Config.Workspace.ArtifactPath = "/Shared/.bundle/x/y/artifacts" b.Config.Workspace.ArtifactPath = "/Shared/.bundle/x/y/artifacts"
b.Config.Workspace.FilePath = "/Shared/.bundle/x/y/files" b.Config.Workspace.FilePath = "/Shared/.bundle/x/y/files"
err = validateProductionMode(context.Background(), b, false) diags = validateProductionMode(context.Background(), b, false)
require.ErrorContains(t, err, "production") require.ErrorContains(t, diags.Error(), "production")
permissions := []resources.Permission{ permissions := []resources.Permission{
{ {
@ -232,8 +232,8 @@ func TestProcessTargetModeProduction(t *testing.T) {
b.Config.Resources.Models["model1"].Permissions = permissions b.Config.Resources.Models["model1"].Permissions = permissions
b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Permissions = permissions b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Permissions = permissions
err = validateProductionMode(context.Background(), b, false) diags = validateProductionMode(context.Background(), b, false)
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name)
assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
@ -246,12 +246,12 @@ func TestProcessTargetModeProductionOkForPrincipal(t *testing.T) {
b := mockBundle(config.Production) b := mockBundle(config.Production)
// Our target has all kinds of problems when not using service principals ... // Our target has all kinds of problems when not using service principals ...
err := validateProductionMode(context.Background(), b, false) diags := validateProductionMode(context.Background(), b, false)
require.Error(t, err) require.Error(t, diags.Error())
// ... but we're much less strict when a principal is used // ... but we're much less strict when a principal is used
err = validateProductionMode(context.Background(), b, true) diags = validateProductionMode(context.Background(), b, true)
require.NoError(t, err) require.NoError(t, diags.Error())
} }
// Make sure that we have test coverage for all resource types // Make sure that we have test coverage for all resource types
@ -277,8 +277,8 @@ func TestAllResourcesRenamed(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
m := ProcessTargetMode() m := ProcessTargetMode()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, err) require.NoError(t, diags.Error())
resources := reflect.ValueOf(b.Config.Resources) resources := reflect.ValueOf(b.Config.Resources)
for i := 0; i < resources.NumField(); i++ { for i := 0; i < resources.NumField(); i++ {

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
) )
@ -15,7 +16,7 @@ func ResolveResourceReferences() bundle.Mutator {
return &resolveResourceReferences{} return &resolveResourceReferences{}
} }
func (m *resolveResourceReferences) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *resolveResourceReferences) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
errs, errCtx := errgroup.WithContext(ctx) errs, errCtx := errgroup.WithContext(ctx)
for k := range b.Config.Variables { for k := range b.Config.Variables {
@ -40,7 +41,7 @@ func (m *resolveResourceReferences) Apply(ctx context.Context, b *bundle.Bundle)
}) })
} }
return errs.Wait() return diag.FromErr(errs.Wait())
} }
func (*resolveResourceReferences) Name() string { func (*resolveResourceReferences) Name() string {

View File

@ -50,8 +50,8 @@ func TestResolveClusterReference(t *testing.T) {
ClusterId: "9876-5432-xywz", ClusterId: "9876-5432-xywz",
}, nil) }, nil)
err := bundle.Apply(context.Background(), b, ResolveResourceReferences()) diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
require.NoError(t, err) require.NoError(t, diags.Error())
require.Equal(t, "1234-5678-abcd", *b.Config.Variables["my-cluster-id-1"].Value) require.Equal(t, "1234-5678-abcd", *b.Config.Variables["my-cluster-id-1"].Value)
require.Equal(t, "9876-5432-xywz", *b.Config.Variables["my-cluster-id-2"].Value) require.Equal(t, "9876-5432-xywz", *b.Config.Variables["my-cluster-id-2"].Value)
} }
@ -79,8 +79,8 @@ func TestResolveNonExistentClusterReference(t *testing.T) {
clusterApi := m.GetMockClustersAPI() clusterApi := m.GetMockClustersAPI()
clusterApi.EXPECT().GetByClusterName(mock.Anything, clusterRef).Return(nil, fmt.Errorf("ClusterDetails named '%s' does not exist", clusterRef)) clusterApi.EXPECT().GetByClusterName(mock.Anything, clusterRef).Return(nil, fmt.Errorf("ClusterDetails named '%s' does not exist", clusterRef))
err := bundle.Apply(context.Background(), b, ResolveResourceReferences()) diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
require.ErrorContains(t, err, "failed to resolve cluster: Random, err: ClusterDetails named 'Random' does not exist") require.ErrorContains(t, diags.Error(), "failed to resolve cluster: Random, err: ClusterDetails named 'Random' does not exist")
} }
func TestNoLookupIfVariableIsSet(t *testing.T) { func TestNoLookupIfVariableIsSet(t *testing.T) {
@ -102,8 +102,8 @@ func TestNoLookupIfVariableIsSet(t *testing.T) {
b.Config.Variables["my-cluster-id"].Set("random value") b.Config.Variables["my-cluster-id"].Set("random value")
err := bundle.Apply(context.Background(), b, ResolveResourceReferences()) diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
require.NoError(t, err) require.NoError(t, diags.Error())
require.Equal(t, "random value", *b.Config.Variables["my-cluster-id"].Value) require.Equal(t, "random value", *b.Config.Variables["my-cluster-id"].Value)
} }
@ -129,7 +129,7 @@ func TestResolveServicePrincipal(t *testing.T) {
ApplicationId: "app-1234", ApplicationId: "app-1234",
}, nil) }, nil)
err := bundle.Apply(context.Background(), b, ResolveResourceReferences()) diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
require.NoError(t, err) require.NoError(t, diags.Error())
require.Equal(t, "app-1234", *b.Config.Variables["my-sp"].Value) require.Equal(t, "app-1234", *b.Config.Variables["my-sp"].Value)
} }

View File

@ -4,6 +4,7 @@ import (
"context" "context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/dynvar" "github.com/databricks/cli/libs/dyn/dynvar"
@ -26,7 +27,7 @@ func (m *resolveVariableReferences) Validate(ctx context.Context, b *bundle.Bund
return nil return nil
} }
func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
prefixes := make([]dyn.Path, len(m.prefixes)) prefixes := make([]dyn.Path, len(m.prefixes))
for i, prefix := range m.prefixes { for i, prefix := range m.prefixes {
prefixes[i] = dyn.MustPathFromString(prefix) prefixes[i] = dyn.MustPathFromString(prefix)
@ -36,7 +37,7 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
// We rewrite it here to make the resolution logic simpler. // We rewrite it here to make the resolution logic simpler.
varPath := dyn.NewPath(dyn.Key("var")) varPath := dyn.NewPath(dyn.Key("var"))
return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
// Synthesize a copy of the root that has all fields that are present in the type // Synthesize a copy of the root that has all fields that are present in the type
// but not set in the dynamic value set to their corresponding empty value. // but not set in the dynamic value set to their corresponding empty value.
// This enables users to interpolate variable references to fields that haven't // This enables users to interpolate variable references to fields that haven't
@ -92,4 +93,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
} }
return root, nil return root, nil
}) })
return diag.FromErr(err)
} }

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
@ -29,14 +30,14 @@ func TestResolveVariableReferences(t *testing.T) {
} }
// Apply with an invalid prefix. This should not change the workspace root path. // Apply with an invalid prefix. This should not change the workspace root path.
err := bundle.Apply(context.Background(), b, ResolveVariableReferences("doesntexist")) diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("doesntexist"))
require.NoError(t, err) require.NoError(t, diags.Error())
require.Equal(t, "${bundle.name}/bar", b.Config.Workspace.RootPath) require.Equal(t, "${bundle.name}/bar", b.Config.Workspace.RootPath)
require.Equal(t, "${workspace.root_path}/baz", b.Config.Workspace.FilePath) require.Equal(t, "${workspace.root_path}/baz", b.Config.Workspace.FilePath)
// Apply with a valid prefix. This should change the workspace root path. // Apply with a valid prefix. This should change the workspace root path.
err = bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "workspace")) diags = bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "workspace"))
require.NoError(t, err) require.NoError(t, diags.Error())
require.Equal(t, "example/bar", b.Config.Workspace.RootPath) require.Equal(t, "example/bar", b.Config.Workspace.RootPath)
require.Equal(t, "example/bar/baz", b.Config.Workspace.FilePath) require.Equal(t, "example/bar/baz", b.Config.Workspace.FilePath)
} }
@ -63,8 +64,8 @@ func TestResolveVariableReferencesToBundleVariables(t *testing.T) {
} }
// Apply with a valid prefix. This should change the workspace root path. // Apply with a valid prefix. This should change the workspace root path.
err := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "variables")) diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "variables"))
require.NoError(t, err) require.NoError(t, diags.Error())
require.Equal(t, "example/bar", b.Config.Workspace.RootPath) require.Equal(t, "example/bar", b.Config.Workspace.RootPath)
} }
@ -92,15 +93,15 @@ func TestResolveVariableReferencesToEmptyFields(t *testing.T) {
} }
// Apply for the bundle prefix. // Apply for the bundle prefix.
err := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle")) diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle"))
require.NoError(t, err) require.NoError(t, diags.Error())
// The job settings should have been interpolated to an empty string. // The job settings should have been interpolated to an empty string.
require.Equal(t, "", b.Config.Resources.Jobs["job1"].JobSettings.Tags["git_branch"]) require.Equal(t, "", b.Config.Resources.Jobs["job1"].JobSettings.Tags["git_branch"])
} }
func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) { func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) {
var err error var diags diag.Diagnostics
b := &bundle.Bundle{ b := &bundle.Bundle{
Config: config.Root{ Config: config.Root{
@ -142,20 +143,21 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) {
ctx := context.Background() ctx := context.Background()
// Initialize the variables. // Initialize the variables.
err = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { diags = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return b.Config.InitializeVariables([]string{ err := b.Config.InitializeVariables([]string{
"no_alert_for_canceled_runs=true", "no_alert_for_canceled_runs=true",
"no_alert_for_skipped_runs=true", "no_alert_for_skipped_runs=true",
"min_workers=1", "min_workers=1",
"max_workers=2", "max_workers=2",
"spot_bid_max_price=0.5", "spot_bid_max_price=0.5",
}) })
return diag.FromErr(err)
}) })
require.NoError(t, err) require.NoError(t, diags.Error())
// Assign the variables to the dynamic configuration. // Assign the variables to the dynamic configuration.
err = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { diags = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
var p dyn.Path var p dyn.Path
var err error var err error
@ -180,12 +182,13 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) {
return v, nil return v, nil
}) })
return diag.FromErr(err)
}) })
require.NoError(t, err) require.NoError(t, diags.Error())
// Apply for the variable prefix. This should resolve the variables to their values. // Apply for the variable prefix. This should resolve the variables to their values.
err = bundle.Apply(context.Background(), b, ResolveVariableReferences("variables")) diags = bundle.Apply(context.Background(), b, ResolveVariableReferences("variables"))
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, true, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForCanceledRuns) assert.Equal(t, true, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForCanceledRuns)
assert.Equal(t, true, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForSkippedRuns) assert.Equal(t, true, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForSkippedRuns)
assert.Equal(t, 1, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MinWorkers) assert.Equal(t, 1, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MinWorkers)

View File

@ -6,6 +6,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
) )
@ -41,8 +42,8 @@ func (m *rewriteSyncPaths) makeRelativeTo(root string) dyn.MapFunc {
} }
} }
func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
return dyn.Map(v, "sync", func(_ dyn.Path, v dyn.Value) (nv dyn.Value, err error) { return dyn.Map(v, "sync", func(_ dyn.Path, v dyn.Value) (nv dyn.Value, err error) {
v, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) v, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.Config.Path)))
if err != nil { if err != nil {
@ -55,4 +56,6 @@ func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) error {
return v, nil return v, nil
}) })
}) })
return diag.FromErr(err)
} }

View File

@ -34,8 +34,8 @@ func TestRewriteSyncPathsRelative(t *testing.T) {
bundletest.SetLocation(b, "sync.exclude[0]", "./a/b/file.yml") bundletest.SetLocation(b, "sync.exclude[0]", "./a/b/file.yml")
bundletest.SetLocation(b, "sync.exclude[1]", "./a/b/c/file.yml") bundletest.SetLocation(b, "sync.exclude[1]", "./a/b/c/file.yml")
err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) diags := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths())
assert.NoError(t, err) assert.NoError(t, diags.Error())
assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0]) assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0])
assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1]) assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1])
@ -65,8 +65,8 @@ func TestRewriteSyncPathsAbsolute(t *testing.T) {
bundletest.SetLocation(b, "sync.exclude[0]", "/tmp/dir/a/b/file.yml") bundletest.SetLocation(b, "sync.exclude[0]", "/tmp/dir/a/b/file.yml")
bundletest.SetLocation(b, "sync.exclude[1]", "/tmp/dir/a/b/c/file.yml") bundletest.SetLocation(b, "sync.exclude[1]", "/tmp/dir/a/b/c/file.yml")
err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) diags := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths())
assert.NoError(t, err) assert.NoError(t, diags.Error())
assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0]) assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0])
assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1]) assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1])
@ -82,8 +82,8 @@ func TestRewriteSyncPathsErrorPaths(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) diags := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths())
assert.NoError(t, err) assert.NoError(t, diags.Error())
}) })
t.Run("empty include/exclude blocks", func(t *testing.T) { t.Run("empty include/exclude blocks", func(t *testing.T) {
@ -97,7 +97,7 @@ func TestRewriteSyncPathsErrorPaths(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) diags := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths())
assert.NoError(t, err) assert.NoError(t, diags.Error())
}) })
} }

View File

@ -6,6 +6,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
) )
@ -23,7 +24,7 @@ func (m *setRunAs) Name() string {
return "SetRunAs" return "SetRunAs"
} }
func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) error { func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
runAs := b.Config.RunAs runAs := b.Config.RunAs
if runAs == nil { if runAs == nil {
return nil return nil

View File

@ -2,10 +2,10 @@ package mutator
import ( import (
"context" "context"
"fmt"
"strings" "strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
) )
@ -20,9 +20,9 @@ func (m *selectDefaultTarget) Name() string {
return "SelectDefaultTarget" return "SelectDefaultTarget"
} }
func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if len(b.Config.Targets) == 0 { if len(b.Config.Targets) == 0 {
return fmt.Errorf("no targets defined") return diag.Errorf("no targets defined")
} }
// One target means there's only one default. // One target means there's only one default.
@ -41,12 +41,12 @@ func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error
// It is invalid to have multiple targets with the `default` flag set. // It is invalid to have multiple targets with the `default` flag set.
if len(defaults) > 1 { if len(defaults) > 1 {
return fmt.Errorf("multiple targets are marked as default (%s)", strings.Join(defaults, ", ")) return diag.Errorf("multiple targets are marked as default (%s)", strings.Join(defaults, ", "))
} }
// If no target has the `default` flag set, ask the user to specify one. // If no target has the `default` flag set, ask the user to specify one.
if len(defaults) == 0 { if len(defaults) == 0 {
return fmt.Errorf("please specify target") return diag.Errorf("please specify target")
} }
// One default remaining. // One default remaining.

View File

@ -16,8 +16,8 @@ func TestSelectDefaultTargetNoTargets(t *testing.T) {
Targets: map[string]*config.Target{}, Targets: map[string]*config.Target{},
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget())
assert.ErrorContains(t, err, "no targets defined") assert.ErrorContains(t, diags.Error(), "no targets defined")
} }
func TestSelectDefaultTargetSingleTargets(t *testing.T) { func TestSelectDefaultTargetSingleTargets(t *testing.T) {
@ -28,8 +28,8 @@ func TestSelectDefaultTargetSingleTargets(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget())
assert.NoError(t, err) assert.NoError(t, diags.Error())
assert.Equal(t, "foo", b.Config.Bundle.Target) assert.Equal(t, "foo", b.Config.Bundle.Target)
} }
@ -43,8 +43,8 @@ func TestSelectDefaultTargetNoDefaults(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget())
assert.ErrorContains(t, err, "please specify target") assert.ErrorContains(t, diags.Error(), "please specify target")
} }
func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) { func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) {
@ -56,8 +56,8 @@ func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget())
assert.ErrorContains(t, err, "please specify target") assert.ErrorContains(t, diags.Error(), "please specify target")
} }
func TestSelectDefaultTargetMultipleDefaults(t *testing.T) { func TestSelectDefaultTargetMultipleDefaults(t *testing.T) {
@ -70,8 +70,8 @@ func TestSelectDefaultTargetMultipleDefaults(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget())
assert.ErrorContains(t, err, "multiple targets are marked as default") assert.ErrorContains(t, diags.Error(), "multiple targets are marked as default")
} }
func TestSelectDefaultTargetSingleDefault(t *testing.T) { func TestSelectDefaultTargetSingleDefault(t *testing.T) {
@ -84,7 +84,7 @@ func TestSelectDefaultTargetSingleDefault(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) diags := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget())
assert.NoError(t, err) assert.NoError(t, diags.Error())
assert.Equal(t, "bar", b.Config.Bundle.Target) assert.Equal(t, "bar", b.Config.Bundle.Target)
} }

View File

@ -6,6 +6,7 @@ import (
"strings" "strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
) )
@ -24,21 +25,21 @@ func (m *selectTarget) Name() string {
return fmt.Sprintf("SelectTarget(%s)", m.name) return fmt.Sprintf("SelectTarget(%s)", m.name)
} }
func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error { func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
if b.Config.Targets == nil { if b.Config.Targets == nil {
return fmt.Errorf("no targets defined") return diag.Errorf("no targets defined")
} }
// Get specified target // Get specified target
_, ok := b.Config.Targets[m.name] _, ok := b.Config.Targets[m.name]
if !ok { if !ok {
return fmt.Errorf("%s: no such target. Available targets: %s", m.name, strings.Join(maps.Keys(b.Config.Targets), ", ")) return diag.Errorf("%s: no such target. Available targets: %s", m.name, strings.Join(maps.Keys(b.Config.Targets), ", "))
} }
// Merge specified target into root configuration structure. // Merge specified target into root configuration structure.
err := b.Config.MergeTargetOverrides(m.name) err := b.Config.MergeTargetOverrides(m.name)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Store specified target in configuration for reference. // Store specified target in configuration for reference.

View File

@ -26,8 +26,8 @@ func TestSelectTarget(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.SelectTarget("default")) diags := bundle.Apply(context.Background(), b, mutator.SelectTarget("default"))
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "bar", b.Config.Workspace.Host) assert.Equal(t, "bar", b.Config.Workspace.Host)
} }
@ -39,6 +39,6 @@ func TestSelectTargetNotFound(t *testing.T) {
}, },
}, },
} }
err := bundle.Apply(context.Background(), b, mutator.SelectTarget("doesnt-exist")) diags := bundle.Apply(context.Background(), b, mutator.SelectTarget("doesnt-exist"))
require.Error(t, err, "no targets defined") require.Error(t, diags.Error(), "no targets defined")
} }

View File

@ -2,10 +2,10 @@ package mutator
import ( import (
"context" "context"
"fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
) )
@ -21,7 +21,7 @@ func (m *setVariables) Name() string {
return "SetVariables" return "SetVariables"
} }
func setVariable(ctx context.Context, v *variable.Variable, name string) error { func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Diagnostics {
// case: variable already has value initialized, so skip // case: variable already has value initialized, so skip
if v.HasValue() { if v.HasValue() {
return nil return nil
@ -32,7 +32,7 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) error {
if val, ok := env.Lookup(ctx, envVarName); ok { if val, ok := env.Lookup(ctx, envVarName); ok {
err := v.Set(val) err := v.Set(val)
if err != nil { if err != nil {
return fmt.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %w`, val, name, envVarName, err) return diag.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %v`, val, name, envVarName, err)
} }
return nil return nil
} }
@ -41,7 +41,7 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) error {
if v.HasDefault() { if v.HasDefault() {
err := v.Set(*v.Default) err := v.Set(*v.Default)
if err != nil { if err != nil {
return fmt.Errorf(`failed to assign default value from config "%s" to variable %s with error: %w`, *v.Default, name, err) return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, *v.Default, name, err)
} }
return nil return nil
} }
@ -55,15 +55,16 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) error {
// We should have had a value to set for the variable at this point. // We should have had a value to set for the variable at this point.
// TODO: use cmdio to request values for unassigned variables if current // TODO: use cmdio to request values for unassigned variables if current
// terminal is a tty. Tracked in https://github.com/databricks/cli/issues/379 // terminal is a tty. Tracked in https://github.com/databricks/cli/issues/379
return fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name) return diag.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
} }
func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics
for name, variable := range b.Config.Variables { for name, variable := range b.Config.Variables {
err := setVariable(ctx, variable, name) diags = diags.Extend(setVariable(ctx, variable, name))
if err != nil { if diags.HasError() {
return err return diags
} }
} }
return nil return diags
} }

View File

@ -21,8 +21,8 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {
// set value for variable as an environment variable // set value for variable as an environment variable
t.Setenv("BUNDLE_VAR_foo", "process-env") t.Setenv("BUNDLE_VAR_foo", "process-env")
err := setVariable(context.Background(), &variable, "foo") diags := setVariable(context.Background(), &variable, "foo")
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, *variable.Value, "process-env") assert.Equal(t, *variable.Value, "process-env")
} }
@ -33,8 +33,8 @@ func TestSetVariableUsingDefaultValue(t *testing.T) {
Default: &defaultVal, Default: &defaultVal,
} }
err := setVariable(context.Background(), &variable, "foo") diags := setVariable(context.Background(), &variable, "foo")
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, *variable.Value, "default") assert.Equal(t, *variable.Value, "default")
} }
@ -49,8 +49,8 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
// since a value is already assigned to the variable, it would not be overridden // since a value is already assigned to the variable, it would not be overridden
// by the default value // by the default value
err := setVariable(context.Background(), &variable, "foo") diags := setVariable(context.Background(), &variable, "foo")
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, *variable.Value, "assigned-value") assert.Equal(t, *variable.Value, "assigned-value")
} }
@ -68,8 +68,8 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
// since a value is already assigned to the variable, it would not be overridden // since a value is already assigned to the variable, it would not be overridden
// by the value from environment // by the value from environment
err := setVariable(context.Background(), &variable, "foo") diags := setVariable(context.Background(), &variable, "foo")
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, *variable.Value, "assigned-value") assert.Equal(t, *variable.Value, "assigned-value")
} }
@ -79,8 +79,8 @@ func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) {
} }
// fails because we could not resolve a value for the variable // fails because we could not resolve a value for the variable
err := setVariable(context.Background(), &variable, "foo") diags := setVariable(context.Background(), &variable, "foo")
assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable") assert.ErrorContains(t, diags.Error(), "no value assigned to required variable foo. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable")
} }
func TestSetVariablesMutator(t *testing.T) { func TestSetVariablesMutator(t *testing.T) {
@ -108,8 +108,8 @@ func TestSetVariablesMutator(t *testing.T) {
t.Setenv("BUNDLE_VAR_b", "env-var-b") t.Setenv("BUNDLE_VAR_b", "env-var-b")
err := bundle.Apply(context.Background(), b, SetVariables()) diags := bundle.Apply(context.Background(), b, SetVariables())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, "default-a", *b.Config.Variables["a"].Value) assert.Equal(t, "default-a", *b.Config.Variables["a"].Value)
assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value) assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value)
assert.Equal(t, "assigned-val-c", *b.Config.Variables["c"].Value) assert.Equal(t, "assigned-val-c", *b.Config.Variables["c"].Value)

View File

@ -9,6 +9,7 @@ import (
"text/template" "text/template"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
) )
@ -40,12 +41,12 @@ func (m *trampoline) Name() string {
return fmt.Sprintf("trampoline(%s)", m.name) return fmt.Sprintf("trampoline(%s)", m.name)
} }
func (m *trampoline) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *trampoline) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
tasks := m.functions.GetTasks(b) tasks := m.functions.GetTasks(b)
for _, task := range tasks { for _, task := range tasks {
err := m.generateNotebookWrapper(ctx, b, task) err := m.generateNotebookWrapper(ctx, b, task)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
} }
return nil return nil

View File

@ -80,8 +80,8 @@ func TestGenerateTrampoline(t *testing.T) {
funcs := functions{} funcs := functions{}
trampoline := NewTrampoline("test_trampoline", &funcs, "Hello from {{.MyName}}") trampoline := NewTrampoline("test_trampoline", &funcs, "Hello from {{.MyName}}")
err := bundle.Apply(ctx, b, trampoline) diags := bundle.Apply(ctx, b, trampoline)
require.NoError(t, err) require.NoError(t, diags.Error())
dir, err := b.InternalDir(ctx) dir, err := b.InternalDir(ctx)
require.NoError(t, err) require.NoError(t, err)

View File

@ -11,6 +11,7 @@ import (
"strings" "strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/notebook" "github.com/databricks/cli/libs/notebook"
) )
@ -185,10 +186,10 @@ func (m *translatePaths) rewriteRelativeTo(b *bundle.Bundle, p dyn.Path, v dyn.V
return dyn.InvalidValue, err return dyn.InvalidValue, err
} }
func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) error { func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
m.seen = make(map[string]string) m.seen = make(map[string]string)
return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
var err error var err error
for _, fn := range []func(*bundle.Bundle, dyn.Value) (dyn.Value, error){ for _, fn := range []func(*bundle.Bundle, dyn.Value) (dyn.Value, error){
m.applyJobTranslations, m.applyJobTranslations,
@ -202,4 +203,6 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) error {
} }
return v, nil return v, nil
}) })
return diag.FromErr(err)
} }

View File

@ -78,8 +78,8 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal( assert.Equal(
t, t,
@ -201,8 +201,8 @@ func TestTranslatePaths(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
require.NoError(t, err) require.NoError(t, diags.Error())
// Assert that the path in the tasks now refer to the artifact. // Assert that the path in the tasks now refer to the artifact.
assert.Equal( assert.Equal(
@ -332,8 +332,8 @@ func TestTranslatePathsInSubdirectories(t *testing.T) {
bundletest.SetLocation(b, "resources.jobs", filepath.Join(dir, "job/resource.yml")) bundletest.SetLocation(b, "resources.jobs", filepath.Join(dir, "job/resource.yml"))
bundletest.SetLocation(b, "resources.pipelines", filepath.Join(dir, "pipeline/resource.yml")) bundletest.SetLocation(b, "resources.pipelines", filepath.Join(dir, "pipeline/resource.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal( assert.Equal(
t, t,
@ -392,8 +392,8 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "../resource.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "../resource.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.ErrorContains(t, err, "is not contained in bundle root") assert.ErrorContains(t, diags.Error(), "is not contained in bundle root")
} }
func TestJobNotebookDoesNotExistError(t *testing.T) { func TestJobNotebookDoesNotExistError(t *testing.T) {
@ -422,8 +422,8 @@ func TestJobNotebookDoesNotExistError(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") assert.EqualError(t, diags.Error(), "notebook ./doesnt_exist.py not found")
} }
func TestJobFileDoesNotExistError(t *testing.T) { func TestJobFileDoesNotExistError(t *testing.T) {
@ -452,8 +452,8 @@ func TestJobFileDoesNotExistError(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.EqualError(t, err, "file ./doesnt_exist.py not found") assert.EqualError(t, diags.Error(), "file ./doesnt_exist.py not found")
} }
func TestPipelineNotebookDoesNotExistError(t *testing.T) { func TestPipelineNotebookDoesNotExistError(t *testing.T) {
@ -482,8 +482,8 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") assert.EqualError(t, diags.Error(), "notebook ./doesnt_exist.py not found")
} }
func TestPipelineFileDoesNotExistError(t *testing.T) { func TestPipelineFileDoesNotExistError(t *testing.T) {
@ -512,8 +512,8 @@ func TestPipelineFileDoesNotExistError(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.EqualError(t, err, "file ./doesnt_exist.py not found") assert.EqualError(t, diags.Error(), "file ./doesnt_exist.py not found")
} }
func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) {
@ -546,8 +546,8 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.ErrorContains(t, err, `expected a file for "resources.jobs.job.tasks[0].spark_python_task.python_file" but got a notebook`) assert.ErrorContains(t, diags.Error(), `expected a file for "resources.jobs.job.tasks[0].spark_python_task.python_file" but got a notebook`)
} }
func TestJobNotebookTaskWithFileSourceError(t *testing.T) { func TestJobNotebookTaskWithFileSourceError(t *testing.T) {
@ -580,8 +580,8 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.ErrorContains(t, err, `expected a notebook for "resources.jobs.job.tasks[0].notebook_task.notebook_path" but got a file`) assert.ErrorContains(t, diags.Error(), `expected a notebook for "resources.jobs.job.tasks[0].notebook_task.notebook_path" but got a file`)
} }
func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) {
@ -614,8 +614,8 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.ErrorContains(t, err, `expected a notebook for "resources.pipelines.pipeline.libraries[0].notebook.path" but got a file`) assert.ErrorContains(t, diags.Error(), `expected a notebook for "resources.pipelines.pipeline.libraries[0].notebook.path" but got a file`)
} }
func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) {
@ -648,6 +648,6 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) {
bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
assert.ErrorContains(t, err, `expected a file for "resources.pipelines.pipeline.libraries[0].file.path" but got a notebook`) assert.ErrorContains(t, diags.Error(), `expected a file for "resources.pipelines.pipeline.libraries[0].file.path" but got a notebook`)
} }

View File

@ -2,9 +2,9 @@ package mutator
import ( import (
"context" "context"
"fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
) )
type validateGitDetails struct{} type validateGitDetails struct{}
@ -17,13 +17,13 @@ func (m *validateGitDetails) Name() string {
return "ValidateGitDetails" return "ValidateGitDetails"
} }
func (m *validateGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *validateGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if b.Config.Bundle.Git.Branch == "" || b.Config.Bundle.Git.ActualBranch == "" { if b.Config.Bundle.Git.Branch == "" || b.Config.Bundle.Git.ActualBranch == "" {
return nil return nil
} }
if b.Config.Bundle.Git.Branch != b.Config.Bundle.Git.ActualBranch && !b.Config.Bundle.Force { if b.Config.Bundle.Git.Branch != b.Config.Bundle.Git.ActualBranch && !b.Config.Bundle.Force {
return fmt.Errorf("not on the right Git branch:\n expected according to configuration: %s\n actual: %s\nuse --force to override", b.Config.Bundle.Git.Branch, b.Config.Bundle.Git.ActualBranch) return diag.Errorf("not on the right Git branch:\n expected according to configuration: %s\n actual: %s\nuse --force to override", b.Config.Bundle.Git.Branch, b.Config.Bundle.Git.ActualBranch)
} }
return nil return nil
} }

View File

@ -22,9 +22,8 @@ func TestValidateGitDetailsMatchingBranches(t *testing.T) {
} }
m := ValidateGitDetails() m := ValidateGitDetails()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
assert.NoError(t, diags.Error())
assert.NoError(t, err)
} }
func TestValidateGitDetailsNonMatchingBranches(t *testing.T) { func TestValidateGitDetailsNonMatchingBranches(t *testing.T) {
@ -40,10 +39,10 @@ func TestValidateGitDetailsNonMatchingBranches(t *testing.T) {
} }
m := ValidateGitDetails() m := ValidateGitDetails()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
expectedError := "not on the right Git branch:\n expected according to configuration: main\n actual: feature\nuse --force to override" expectedError := "not on the right Git branch:\n expected according to configuration: main\n actual: feature\nuse --force to override"
assert.EqualError(t, err, expectedError) assert.EqualError(t, diags.Error(), expectedError)
} }
func TestValidateGitDetailsNotUsingGit(t *testing.T) { func TestValidateGitDetailsNotUsingGit(t *testing.T) {
@ -59,7 +58,6 @@ func TestValidateGitDetailsNotUsingGit(t *testing.T) {
} }
m := ValidateGitDetails() m := ValidateGitDetails()
err := bundle.Apply(context.Background(), b, m) diags := bundle.Apply(context.Background(), b, m)
assert.NoError(t, diags.Error())
assert.NoError(t, err)
} }

View File

@ -3,7 +3,7 @@ package bundle
import ( import (
"context" "context"
"github.com/databricks/cli/libs/errs" "github.com/databricks/cli/libs/diag"
) )
type DeferredMutator struct { type DeferredMutator struct {
@ -22,12 +22,9 @@ func Defer(mutator Mutator, finally Mutator) Mutator {
} }
} }
func (d *DeferredMutator) Apply(ctx context.Context, b *Bundle) error { func (d *DeferredMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
mainErr := Apply(ctx, b, d.mutator) var diags diag.Diagnostics
errOnFinish := Apply(ctx, b, d.finally) diags = diags.Extend(Apply(ctx, b, d.mutator))
if mainErr != nil || errOnFinish != nil { diags = diags.Extend(Apply(ctx, b, d.finally))
return errs.FromMany(mainErr, errOnFinish) return diags
}
return nil
} }

View File

@ -2,9 +2,9 @@ package bundle
import ( import (
"context" "context"
"fmt"
"testing" "testing"
"github.com/databricks/cli/libs/diag"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -17,9 +17,9 @@ func (t *mutatorWithError) Name() string {
return "mutatorWithError" return "mutatorWithError"
} }
func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) error { func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) diag.Diagnostics {
t.applyCalled++ t.applyCalled++
return fmt.Errorf(t.errorMsg) return diag.Errorf(t.errorMsg)
} }
func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) { func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) {
@ -30,8 +30,8 @@ func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) {
deferredMutator := Defer(Seq(m1, m2, m3), cleanup) deferredMutator := Defer(Seq(m1, m2, m3), cleanup)
b := &Bundle{} b := &Bundle{}
err := Apply(context.Background(), b, deferredMutator) diags := Apply(context.Background(), b, deferredMutator)
assert.NoError(t, err) assert.NoError(t, diags.Error())
assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled) assert.Equal(t, 1, m2.applyCalled)
@ -47,8 +47,8 @@ func TestDeferredMutatorWhenFirstFails(t *testing.T) {
deferredMutator := Defer(Seq(mErr, m1, m2), cleanup) deferredMutator := Defer(Seq(mErr, m1, m2), cleanup)
b := &Bundle{} b := &Bundle{}
err := Apply(context.Background(), b, deferredMutator) diags := Apply(context.Background(), b, deferredMutator)
assert.ErrorContains(t, err, "mutator error occurred") assert.ErrorContains(t, diags.Error(), "mutator error occurred")
assert.Equal(t, 1, mErr.applyCalled) assert.Equal(t, 1, mErr.applyCalled)
assert.Equal(t, 0, m1.applyCalled) assert.Equal(t, 0, m1.applyCalled)
@ -64,8 +64,8 @@ func TestDeferredMutatorWhenMiddleOneFails(t *testing.T) {
deferredMutator := Defer(Seq(m1, mErr, m2), cleanup) deferredMutator := Defer(Seq(m1, mErr, m2), cleanup)
b := &Bundle{} b := &Bundle{}
err := Apply(context.Background(), b, deferredMutator) diags := Apply(context.Background(), b, deferredMutator)
assert.ErrorContains(t, err, "mutator error occurred") assert.ErrorContains(t, diags.Error(), "mutator error occurred")
assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, mErr.applyCalled) assert.Equal(t, 1, mErr.applyCalled)
@ -81,8 +81,8 @@ func TestDeferredMutatorWhenLastOneFails(t *testing.T) {
deferredMutator := Defer(Seq(m1, m2, mErr), cleanup) deferredMutator := Defer(Seq(m1, m2, mErr), cleanup)
b := &Bundle{} b := &Bundle{}
err := Apply(context.Background(), b, deferredMutator) diags := Apply(context.Background(), b, deferredMutator)
assert.ErrorContains(t, err, "mutator error occurred") assert.ErrorContains(t, diags.Error(), "mutator error occurred")
assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled) assert.Equal(t, 1, m2.applyCalled)
@ -98,8 +98,14 @@ func TestDeferredMutatorCombinesErrorMessages(t *testing.T) {
deferredMutator := Defer(Seq(m1, m2, mErr), cleanupErr) deferredMutator := Defer(Seq(m1, m2, mErr), cleanupErr)
b := &Bundle{} b := &Bundle{}
err := Apply(context.Background(), b, deferredMutator) diags := Apply(context.Background(), b, deferredMutator)
assert.ErrorContains(t, err, "mutator error occurred\ncleanup error occurred")
var errs []string
for _, d := range diags {
errs = append(errs, d.Summary)
}
assert.Contains(t, errs, "mutator error occurred")
assert.Contains(t, errs, "cleanup error occurred")
assert.Equal(t, 1, m1.applyCalled) assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled) assert.Equal(t, 1, m2.applyCalled)

View File

@ -6,6 +6,7 @@ import (
"strconv" "strconv"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/databricks/databricks-sdk-go/service/pipelines"
@ -30,29 +31,29 @@ func (l *checkRunningResources) Name() string {
return "check-running-resources" return "check-running-resources"
} }
func (l *checkRunningResources) Apply(ctx context.Context, b *bundle.Bundle) error { func (l *checkRunningResources) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if !b.Config.Bundle.Deployment.FailOnActiveRuns { if !b.Config.Bundle.Deployment.FailOnActiveRuns {
return nil return nil
} }
tf := b.Terraform tf := b.Terraform
if tf == nil { if tf == nil {
return fmt.Errorf("terraform not initialized") return diag.Errorf("terraform not initialized")
} }
err := tf.Init(ctx, tfexec.Upgrade(true)) err := tf.Init(ctx, tfexec.Upgrade(true))
if err != nil { if err != nil {
return fmt.Errorf("terraform init: %w", err) return diag.Errorf("terraform init: %v", err)
} }
state, err := b.Terraform.Show(ctx) state, err := b.Terraform.Show(ctx)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
err = checkAnyResourceRunning(ctx, b.WorkspaceClient(), state) err = checkAnyResourceRunning(ctx, b.WorkspaceClient(), state)
if err != nil { if err != nil {
return fmt.Errorf("deployment aborted, err: %w", err) return diag.Errorf("deployment aborted, err: %v", err)
} }
return nil return nil

View File

@ -6,6 +6,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/fatih/color" "github.com/fatih/color"
) )
@ -16,7 +17,7 @@ func (m *delete) Name() string {
return "files.Delete" return "files.Delete"
} }
func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// Do not delete files if terraform destroy was not consented // Do not delete files if terraform destroy was not consented
if !b.Plan.IsEmpty && !b.Plan.ConfirmApply { if !b.Plan.IsEmpty && !b.Plan.ConfirmApply {
return nil return nil
@ -29,7 +30,7 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error {
if !b.AutoApprove { if !b.AutoApprove {
proceed, err := cmdio.AskYesOrNo(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?", b.Config.Workspace.RootPath, red("deleted permanently!"))) proceed, err := cmdio.AskYesOrNo(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?", b.Config.Workspace.RootPath, red("deleted permanently!")))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
if !proceed { if !proceed {
return nil return nil
@ -41,17 +42,17 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error {
Recursive: true, Recursive: true,
}) })
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Clean up sync snapshot file // Clean up sync snapshot file
sync, err := GetSync(ctx, b) sync, err := GetSync(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
err = sync.DestroySnapshot(ctx) err = sync.DestroySnapshot(ctx)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
cmdio.LogString(ctx, fmt.Sprintf("Deleted snapshot file at %s", sync.SnapshotPath())) cmdio.LogString(ctx, fmt.Sprintf("Deleted snapshot file at %s", sync.SnapshotPath()))

View File

@ -6,6 +6,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -15,16 +16,16 @@ func (m *upload) Name() string {
return "files.Upload" return "files.Upload"
} }
func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
cmdio.LogString(ctx, fmt.Sprintf("Uploading bundle files to %s...", b.Config.Workspace.FilePath)) cmdio.LogString(ctx, fmt.Sprintf("Uploading bundle files to %s...", b.Config.Workspace.FilePath))
sync, err := GetSync(ctx, b) sync, err := GetSync(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
err = sync.RunOnce(ctx) err = sync.RunOnce(ctx)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
log.Infof(ctx, "Uploaded bundle files") log.Infof(ctx, "Uploaded bundle files")

View File

@ -3,9 +3,9 @@ package lock
import ( import (
"context" "context"
"errors" "errors"
"fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/locker" "github.com/databricks/cli/libs/locker"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
@ -33,7 +33,7 @@ func (m *acquire) init(b *bundle.Bundle) error {
return nil return nil
} }
func (m *acquire) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *acquire) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// Return early if locking is disabled. // Return early if locking is disabled.
if !b.Config.Bundle.Deployment.Lock.IsEnabled() { if !b.Config.Bundle.Deployment.Lock.IsEnabled() {
log.Infof(ctx, "Skipping; locking is disabled") log.Infof(ctx, "Skipping; locking is disabled")
@ -42,7 +42,7 @@ func (m *acquire) Apply(ctx context.Context, b *bundle.Bundle) error {
err := m.init(b) err := m.init(b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
force := b.Config.Bundle.Deployment.Lock.Force force := b.Config.Bundle.Deployment.Lock.Force
@ -55,9 +55,9 @@ func (m *acquire) Apply(ctx context.Context, b *bundle.Bundle) error {
if errors.As(err, &notExistsError) { if errors.As(err, &notExistsError) {
// If we get a "doesn't exist" error from the API this indicates // If we get a "doesn't exist" error from the API this indicates
// we either don't have permissions or the path is invalid. // we either don't have permissions or the path is invalid.
return fmt.Errorf("cannot write to deployment root (this can indicate a previous deploy was done with a different identity): %s", b.Config.Workspace.RootPath) return diag.Errorf("cannot write to deployment root (this can indicate a previous deploy was done with a different identity): %s", b.Config.Workspace.RootPath)
} }
return err return diag.FromErr(err)
} }
return nil return nil

View File

@ -2,9 +2,9 @@ package lock
import ( import (
"context" "context"
"fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/locker" "github.com/databricks/cli/libs/locker"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -30,7 +30,7 @@ func (m *release) Name() string {
return "lock:release" return "lock:release"
} }
func (m *release) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *release) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// Return early if locking is disabled. // Return early if locking is disabled.
if !b.Config.Bundle.Deployment.Lock.IsEnabled() { if !b.Config.Bundle.Deployment.Lock.IsEnabled() {
log.Infof(ctx, "Skipping; locking is disabled") log.Infof(ctx, "Skipping; locking is disabled")
@ -47,12 +47,12 @@ func (m *release) Apply(ctx context.Context, b *bundle.Bundle) error {
log.Infof(ctx, "Releasing deployment lock") log.Infof(ctx, "Releasing deployment lock")
switch m.goal { switch m.goal {
case GoalDeploy: case GoalDeploy:
return b.Locker.Unlock(ctx) return diag.FromErr(b.Locker.Unlock(ctx))
case GoalBind, GoalUnbind: case GoalBind, GoalUnbind:
return b.Locker.Unlock(ctx) return diag.FromErr(b.Locker.Unlock(ctx))
case GoalDestroy: case GoalDestroy:
return b.Locker.Unlock(ctx, locker.AllowLockFileNotExist) return diag.FromErr(b.Locker.Unlock(ctx, locker.AllowLockFileNotExist))
default: default:
return fmt.Errorf("unknown goal for lock release: %s", m.goal) return diag.Errorf("unknown goal for lock release: %s", m.goal)
} }
} }

View File

@ -5,6 +5,7 @@ import (
"path" "path"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
) )
@ -18,7 +19,7 @@ func (m *annotateJobs) Name() string {
return "metadata.AnnotateJobs" return "metadata.AnnotateJobs"
} }
func (m *annotateJobs) Apply(_ context.Context, b *bundle.Bundle) error { func (m *annotateJobs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
for _, job := range b.Config.Resources.Jobs { for _, job := range b.Config.Resources.Jobs {
if job.JobSettings == nil { if job.JobSettings == nil {
continue continue

View File

@ -9,6 +9,7 @@ import (
"github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
) )
func TestAnnotateJobsMutator(t *testing.T) { func TestAnnotateJobsMutator(t *testing.T) {
@ -34,8 +35,8 @@ func TestAnnotateJobsMutator(t *testing.T) {
}, },
} }
err := AnnotateJobs().Apply(context.Background(), b) diags := AnnotateJobs().Apply(context.Background(), b)
assert.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, assert.Equal(t,
&jobs.JobDeployment{ &jobs.JobDeployment{
@ -67,6 +68,6 @@ func TestAnnotateJobsMutatorJobWithoutSettings(t *testing.T) {
}, },
} }
err := AnnotateJobs().Apply(context.Background(), b) diags := AnnotateJobs().Apply(context.Background(), b)
assert.NoError(t, err) require.NoError(t, diags.Error())
} }

View File

@ -2,12 +2,12 @@ package metadata
import ( import (
"context" "context"
"fmt"
"path/filepath" "path/filepath"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/metadata" "github.com/databricks/cli/bundle/metadata"
"github.com/databricks/cli/libs/diag"
) )
type compute struct{} type compute struct{}
@ -20,7 +20,7 @@ func (m *compute) Name() string {
return "metadata.Compute" return "metadata.Compute"
} }
func (m *compute) Apply(_ context.Context, b *bundle.Bundle) error { func (m *compute) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
b.Metadata = metadata.Metadata{ b.Metadata = metadata.Metadata{
Version: metadata.Version, Version: metadata.Version,
Config: metadata.Config{}, Config: metadata.Config{},
@ -41,7 +41,7 @@ func (m *compute) Apply(_ context.Context, b *bundle.Bundle) error {
// root // root
relativePath, err := filepath.Rel(b.Config.Path, job.ConfigFilePath) relativePath, err := filepath.Rel(b.Config.Path, job.ConfigFilePath)
if err != nil { if err != nil {
return fmt.Errorf("failed to compute relative path for job %s: %w", name, err) return diag.Errorf("failed to compute relative path for job %s: %v", name, err)
} }
// Metadata for the job // Metadata for the job
jobsMetadata[name] = &metadata.Job{ jobsMetadata[name] = &metadata.Job{

View File

@ -91,8 +91,8 @@ func TestComputeMetadataMutator(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, Compute()) diags := bundle.Apply(context.Background(), b, Compute())
require.NoError(t, err) require.NoError(t, diags.Error())
assert.Equal(t, expectedMetadata, b.Metadata) assert.Equal(t, expectedMetadata, b.Metadata)
} }

View File

@ -6,6 +6,7 @@ import (
"encoding/json" "encoding/json"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
) )
@ -21,16 +22,16 @@ func (m *upload) Name() string {
return "metadata.Upload" return "metadata.Upload"
} }
func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
f, err := filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.StatePath) f, err := filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.StatePath)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
metadata, err := json.MarshalIndent(b.Metadata, "", " ") metadata, err := json.MarshalIndent(b.Metadata, "", " ")
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return f.Write(ctx, MetadataFileName, bytes.NewReader(metadata), filer.CreateParentDirectories, filer.OverwriteIfExists) return diag.FromErr(f.Write(ctx, MetadataFileName, bytes.NewReader(metadata), filer.CreateParentDirectories, filer.OverwriteIfExists))
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy/files" "github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/cli/libs/sync" "github.com/databricks/cli/libs/sync"
@ -20,10 +21,10 @@ type statePull struct {
filerFactory FilerFactory filerFactory FilerFactory
} }
func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
f, err := s.filerFactory(b) f, err := s.filerFactory(b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Download deployment state file from filer to local cache directory. // Download deployment state file from filer to local cache directory.
@ -31,7 +32,7 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) error {
remote, err := s.remoteState(ctx, f) remote, err := s.remoteState(ctx, f)
if err != nil { if err != nil {
log.Infof(ctx, "Unable to open remote deployment state file: %s", err) log.Infof(ctx, "Unable to open remote deployment state file: %s", err)
return err return diag.FromErr(err)
} }
if remote == nil { if remote == nil {
log.Infof(ctx, "Remote deployment state file does not exist") log.Infof(ctx, "Remote deployment state file does not exist")
@ -40,19 +41,19 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) error {
statePath, err := getPathToStateFile(ctx, b) statePath, err := getPathToStateFile(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0600) local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0600)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
defer local.Close() defer local.Close()
data := remote.Bytes() data := remote.Bytes()
err = validateRemoteStateCompatibility(bytes.NewReader(data)) err = validateRemoteStateCompatibility(bytes.NewReader(data))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
if !isLocalStateStale(local, bytes.NewReader(data)) { if !isLocalStateStale(local, bytes.NewReader(data)) {
@ -68,30 +69,30 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) error {
log.Infof(ctx, "Writing remote deployment state file to local cache directory") log.Infof(ctx, "Writing remote deployment state file to local cache directory")
_, err = io.Copy(local, bytes.NewReader(data)) _, err = io.Copy(local, bytes.NewReader(data))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
var state DeploymentState var state DeploymentState
err = json.Unmarshal(data, &state) err = json.Unmarshal(data, &state)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Create a new snapshot based on the deployment state file. // Create a new snapshot based on the deployment state file.
opts, err := files.GetSyncOptions(ctx, b) opts, err := files.GetSyncOptions(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
log.Infof(ctx, "Creating new snapshot") log.Infof(ctx, "Creating new snapshot")
snapshot, err := sync.NewSnapshot(state.Files.ToSlice(b.Config.Path), opts) snapshot, err := sync.NewSnapshot(state.Files.ToSlice(b.Config.Path), opts)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Persist the snapshot to disk. // Persist the snapshot to disk.
log.Infof(ctx, "Persisting snapshot to disk") log.Infof(ctx, "Persisting snapshot to disk")
return snapshot.Save(ctx) return diag.FromErr(snapshot.Save(ctx))
} }
func (s *statePull) remoteState(ctx context.Context, f filer.Filer) (*bytes.Buffer, error) { func (s *statePull) remoteState(ctx context.Context, f filer.Filer) (*bytes.Buffer, error) {

View File

@ -106,8 +106,8 @@ func testStatePull(t *testing.T, opts statePullOpts) {
require.NoError(t, err) require.NoError(t, err)
} }
err := bundle.Apply(ctx, b, s) diags := bundle.Apply(ctx, b, s)
require.NoError(t, err) require.NoError(t, diags.Error())
// Check that deployment state was written // Check that deployment state was written
statePath, err := getPathToStateFile(ctx, b) statePath, err := getPathToStateFile(ctx, b)
@ -263,8 +263,8 @@ func TestStatePullNoState(t *testing.T) {
} }
ctx := context.Background() ctx := context.Background()
err := bundle.Apply(ctx, b, s) diags := bundle.Apply(ctx, b, s)
require.NoError(t, err) require.NoError(t, diags.Error())
// Check that deployment state was not written // Check that deployment state was not written
statePath, err := getPathToStateFile(ctx, b) statePath, err := getPathToStateFile(ctx, b)
@ -451,7 +451,7 @@ func TestStatePullNewerDeploymentStateVersion(t *testing.T) {
} }
ctx := context.Background() ctx := context.Background()
err := bundle.Apply(ctx, b, s) diags := bundle.Apply(ctx, b, s)
require.Error(t, err) require.True(t, diags.HasError())
require.Contains(t, err.Error(), "remote deployment state is incompatible with the current version of the CLI, please upgrade to at least 1.2.3") require.ErrorContains(t, diags.Error(), "remote deployment state is incompatible with the current version of the CLI, please upgrade to at least 1.2.3")
} }

View File

@ -5,6 +5,7 @@ import (
"os" "os"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -17,27 +18,27 @@ func (s *statePush) Name() string {
return "deploy:state-push" return "deploy:state-push"
} }
func (s *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { func (s *statePush) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
f, err := s.filerFactory(b) f, err := s.filerFactory(b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
statePath, err := getPathToStateFile(ctx, b) statePath, err := getPathToStateFile(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
local, err := os.Open(statePath) local, err := os.Open(statePath)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
defer local.Close() defer local.Close()
log.Infof(ctx, "Writing local deployment state file to remote state directory") log.Infof(ctx, "Writing local deployment state file to remote state directory")
err = f.Write(ctx, DeploymentStateFileName, local, filer.CreateParentDirectories, filer.OverwriteIfExists) err = f.Write(ctx, DeploymentStateFileName, local, filer.CreateParentDirectories, filer.OverwriteIfExists)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return nil return nil

View File

@ -77,6 +77,6 @@ func TestStatePush(t *testing.T) {
err = os.WriteFile(statePath, data, 0644) err = os.WriteFile(statePath, data, 0644)
require.NoError(t, err) require.NoError(t, err)
err = bundle.Apply(ctx, b, s) diags := bundle.Apply(ctx, b, s)
require.NoError(t, err) require.NoError(t, diags.Error())
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy/files" "github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/cli/internal/build" "github.com/databricks/cli/internal/build"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -21,10 +22,10 @@ func (s *stateUpdate) Name() string {
return "deploy:state-update" return "deploy:state-update"
} }
func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) error { func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
state, err := load(ctx, b) state, err := load(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Increment the state sequence. // Increment the state sequence.
@ -40,41 +41,41 @@ func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) error {
// Get the current file list. // Get the current file list.
sync, err := files.GetSync(ctx, b) sync, err := files.GetSync(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
files, err := sync.GetFileList(ctx) files, err := sync.GetFileList(ctx)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Update the state with the current file list. // Update the state with the current file list.
fl, err := FromSlice(files) fl, err := FromSlice(files)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
state.Files = fl state.Files = fl
statePath, err := getPathToStateFile(ctx, b) statePath, err := getPathToStateFile(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Write the state back to the file. // Write the state back to the file.
f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0600) f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0600)
if err != nil { if err != nil {
log.Infof(ctx, "Unable to open deployment state file: %s", err) log.Infof(ctx, "Unable to open deployment state file: %s", err)
return err return diag.FromErr(err)
} }
defer f.Close() defer f.Close()
data, err := json.Marshal(state) data, err := json.Marshal(state)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
_, err = io.Copy(f, bytes.NewReader(data)) _, err = io.Copy(f, bytes.NewReader(data))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return nil return nil

View File

@ -55,8 +55,8 @@ func TestStateUpdate(t *testing.T) {
ctx := context.Background() ctx := context.Background()
err := bundle.Apply(ctx, b, s) diags := bundle.Apply(ctx, b, s)
require.NoError(t, err) require.NoError(t, diags.Error())
// Check that the state file was updated. // Check that the state file was updated.
state, err := load(ctx, b) state, err := load(ctx, b)
@ -66,8 +66,8 @@ func TestStateUpdate(t *testing.T) {
require.Len(t, state.Files, 3) require.Len(t, state.Files, 3)
require.Equal(t, build.GetInfo().Version, state.CliVersion) require.Equal(t, build.GetInfo().Version, state.CliVersion)
err = bundle.Apply(ctx, b, s) diags = bundle.Apply(ctx, b, s)
require.NoError(t, err) require.NoError(t, diags.Error())
// Check that the state file was updated again. // Check that the state file was updated again.
state, err = load(ctx, b) state, err = load(ctx, b)
@ -136,8 +136,8 @@ func TestStateUpdateWithExistingState(t *testing.T) {
err = os.WriteFile(statePath, data, 0644) err = os.WriteFile(statePath, data, 0644)
require.NoError(t, err) require.NoError(t, err)
err = bundle.Apply(ctx, b, s) diags := bundle.Apply(ctx, b, s)
require.NoError(t, err) require.NoError(t, diags.Error())
// Check that the state file was updated. // Check that the state file was updated.
state, err = load(ctx, b) state, err = load(ctx, b)

View File

@ -2,10 +2,10 @@ package terraform
import ( import (
"context" "context"
"fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
) )
@ -16,22 +16,22 @@ func (w *apply) Name() string {
return "terraform.Apply" return "terraform.Apply"
} }
func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) error { func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
tf := b.Terraform tf := b.Terraform
if tf == nil { if tf == nil {
return fmt.Errorf("terraform not initialized") return diag.Errorf("terraform not initialized")
} }
cmdio.LogString(ctx, "Deploying resources...") cmdio.LogString(ctx, "Deploying resources...")
err := tf.Init(ctx, tfexec.Upgrade(true)) err := tf.Init(ctx, tfexec.Upgrade(true))
if err != nil { if err != nil {
return fmt.Errorf("terraform init: %w", err) return diag.Errorf("terraform init: %v", err)
} }
err = tf.Apply(ctx) err = tf.Apply(ctx)
if err != nil { if err != nil {
return fmt.Errorf("terraform apply: %w", err) return diag.Errorf("terraform apply: %v", err)
} }
log.Infof(ctx, "Resource deployment completed") log.Infof(ctx, "Resource deployment completed")

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/fatih/color" "github.com/fatih/color"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
tfjson "github.com/hashicorp/terraform-json" tfjson "github.com/hashicorp/terraform-json"
@ -62,7 +63,7 @@ func (w *destroy) Name() string {
return "terraform.Destroy" return "terraform.Destroy"
} }
func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// return early if plan is empty // return early if plan is empty
if b.Plan.IsEmpty { if b.Plan.IsEmpty {
cmdio.LogString(ctx, "No resources to destroy in plan. Skipping destroy!") cmdio.LogString(ctx, "No resources to destroy in plan. Skipping destroy!")
@ -71,19 +72,19 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error {
tf := b.Terraform tf := b.Terraform
if tf == nil { if tf == nil {
return fmt.Errorf("terraform not initialized") return diag.Errorf("terraform not initialized")
} }
// read plan file // read plan file
plan, err := tf.ShowPlanFile(ctx, b.Plan.Path) plan, err := tf.ShowPlanFile(ctx, b.Plan.Path)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// print the resources that will be destroyed // print the resources that will be destroyed
err = logDestroyPlan(ctx, plan.ResourceChanges) err = logDestroyPlan(ctx, plan.ResourceChanges)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Ask for confirmation, if needed // Ask for confirmation, if needed
@ -91,7 +92,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error {
red := color.New(color.FgRed).SprintFunc() red := color.New(color.FgRed).SprintFunc()
b.Plan.ConfirmApply, err = cmdio.AskYesOrNo(ctx, fmt.Sprintf("\nThis will permanently %s resources! Proceed?", red("destroy"))) b.Plan.ConfirmApply, err = cmdio.AskYesOrNo(ctx, fmt.Sprintf("\nThis will permanently %s resources! Proceed?", red("destroy")))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
} }
@ -101,7 +102,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error {
} }
if b.Plan.Path == "" { if b.Plan.Path == "" {
return fmt.Errorf("no plan found") return diag.Errorf("no plan found")
} }
cmdio.LogString(ctx, "Starting to destroy resources") cmdio.LogString(ctx, "Starting to destroy resources")
@ -109,7 +110,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error {
// Apply terraform according to the computed destroy plan // Apply terraform according to the computed destroy plan
err = tf.Apply(ctx, tfexec.DirOrPlan(b.Plan.Path)) err = tf.Apply(ctx, tfexec.DirOrPlan(b.Plan.Path))
if err != nil { if err != nil {
return fmt.Errorf("terraform destroy: %w", err) return diag.Errorf("terraform destroy: %v", err)
} }
cmdio.LogString(ctx, "Successfully destroyed resources!") cmdio.LogString(ctx, "Successfully destroyed resources!")

View File

@ -10,6 +10,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
) )
@ -25,31 +26,31 @@ type importResource struct {
} }
// Apply implements bundle.Mutator. // Apply implements bundle.Mutator.
func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
dir, err := Dir(ctx, b) dir, err := Dir(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
tf := b.Terraform tf := b.Terraform
if tf == nil { if tf == nil {
return fmt.Errorf("terraform not initialized") return diag.Errorf("terraform not initialized")
} }
err = tf.Init(ctx, tfexec.Upgrade(true)) err = tf.Init(ctx, tfexec.Upgrade(true))
if err != nil { if err != nil {
return fmt.Errorf("terraform init: %w", err) return diag.Errorf("terraform init: %v", err)
} }
tmpDir, err := os.MkdirTemp("", "state-*") tmpDir, err := os.MkdirTemp("", "state-*")
if err != nil { if err != nil {
return fmt.Errorf("terraform init: %w", err) return diag.Errorf("terraform init: %v", err)
} }
tmpState := filepath.Join(tmpDir, TerraformStateFileName) tmpState := filepath.Join(tmpDir, TerraformStateFileName)
importAddress := fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey) importAddress := fmt.Sprintf("%s.%s", m.opts.ResourceType, m.opts.ResourceKey)
err = tf.Import(ctx, importAddress, m.opts.ResourceId, tfexec.StateOut(tmpState)) err = tf.Import(ctx, importAddress, m.opts.ResourceId, tfexec.StateOut(tmpState))
if err != nil { if err != nil {
return fmt.Errorf("terraform import: %w", err) return diag.Errorf("terraform import: %v", err)
} }
buf := bytes.NewBuffer(nil) buf := bytes.NewBuffer(nil)
@ -58,7 +59,7 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error {
//lint:ignore SA1019 We use legacy -state flag for now to plan the import changes based on temporary state file //lint:ignore SA1019 We use legacy -state flag for now to plan the import changes based on temporary state file
changed, err := tf.Plan(ctx, tfexec.State(tmpState), tfexec.Target(importAddress)) changed, err := tf.Plan(ctx, tfexec.State(tmpState), tfexec.Target(importAddress))
if err != nil { if err != nil {
return fmt.Errorf("terraform plan: %w", err) return diag.Errorf("terraform plan: %v", err)
} }
defer os.RemoveAll(tmpDir) defer os.RemoveAll(tmpDir)
@ -70,29 +71,29 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) error {
cmdio.LogString(ctx, output) cmdio.LogString(ctx, output)
ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? Changes will be remotely applied only after running 'bundle deploy'.") ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? Changes will be remotely applied only after running 'bundle deploy'.")
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
if !ans { if !ans {
return fmt.Errorf("import aborted") return diag.Errorf("import aborted")
} }
} }
// If user confirmed changes, move the state file from temp dir to state location // If user confirmed changes, move the state file from temp dir to state location
f, err := os.Create(filepath.Join(dir, TerraformStateFileName)) f, err := os.Create(filepath.Join(dir, TerraformStateFileName))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
defer f.Close() defer f.Close()
tmpF, err := os.Open(tmpState) tmpF, err := os.Open(tmpState)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
defer tmpF.Close() defer tmpF.Close()
_, err = io.Copy(f, tmpF) _, err = io.Copy(f, tmpF)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return nil return nil

View File

@ -12,6 +12,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/env"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/hashicorp/go-version" "github.com/hashicorp/go-version"
@ -151,7 +152,7 @@ func setProxyEnvVars(ctx context.Context, environ map[string]string, b *bundle.B
return nil return nil
} }
func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
tfConfig := b.Config.Bundle.Terraform tfConfig := b.Config.Bundle.Terraform
if tfConfig == nil { if tfConfig == nil {
tfConfig = &config.Terraform{} tfConfig = &config.Terraform{}
@ -160,46 +161,46 @@ func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error {
execPath, err := m.findExecPath(ctx, b, tfConfig) execPath, err := m.findExecPath(ctx, b, tfConfig)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
workingDir, err := Dir(ctx, b) workingDir, err := Dir(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
tf, err := tfexec.NewTerraform(workingDir, execPath) tf, err := tfexec.NewTerraform(workingDir, execPath)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
environ, err := b.AuthEnv() environ, err := b.AuthEnv()
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
err = inheritEnvVars(ctx, environ) err = inheritEnvVars(ctx, environ)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Set the temporary directory environment variables // Set the temporary directory environment variables
err = setTempDirEnvVars(ctx, environ, b) err = setTempDirEnvVars(ctx, environ, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Set the proxy related environment variables // Set the proxy related environment variables
err = setProxyEnvVars(ctx, environ, b) err = setProxyEnvVars(ctx, environ, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Configure environment variables for auth for Terraform to use. // Configure environment variables for auth for Terraform to use.
log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(environ), ", ")) log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(environ), ", "))
err = tf.SetEnv(environ) err = tf.SetEnv(environ)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
b.Terraform = tf b.Terraform = tf

View File

@ -45,8 +45,8 @@ func TestInitEnvironmentVariables(t *testing.T) {
t.Setenv("DATABRICKS_TOKEN", "foobar") t.Setenv("DATABRICKS_TOKEN", "foobar")
b.WorkspaceClient() b.WorkspaceClient()
err = bundle.Apply(context.Background(), b, Initialize()) diags := bundle.Apply(context.Background(), b, Initialize())
require.NoError(t, err) require.NoError(t, diags.Error())
} }
func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) { func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) {

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/dynvar" "github.com/databricks/cli/libs/dyn/dynvar"
) )
@ -20,8 +21,8 @@ func (m *interpolateMutator) Name() string {
return "terraform.Interpolate" return "terraform.Interpolate"
} }
func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
prefix := dyn.MustPathFromString("resources") prefix := dyn.MustPathFromString("resources")
// Resolve variable references in all values. // Resolve variable references in all values.
@ -61,4 +62,6 @@ func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) error
return dyn.V(fmt.Sprintf("${%s}", path.String())), nil return dyn.V(fmt.Sprintf("${%s}", path.String())), nil
}) })
}) })
return diag.FromErr(err)
} }

View File

@ -55,8 +55,8 @@ func TestInterpolate(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, Interpolate()) diags := bundle.Apply(context.Background(), b, Interpolate())
require.NoError(t, err) require.NoError(t, diags.Error())
j := b.Config.Resources.Jobs["my_job"] j := b.Config.Resources.Jobs["my_job"]
assert.Equal(t, "${databricks_pipeline.other_pipeline.id}", j.Tags["other_pipeline"]) assert.Equal(t, "${databricks_pipeline.other_pipeline.id}", j.Tags["other_pipeline"])
@ -87,6 +87,6 @@ func TestInterpolateUnknownResourceType(t *testing.T) {
}, },
} }
err := bundle.Apply(context.Background(), b, Interpolate()) diags := bundle.Apply(context.Background(), b, Interpolate())
assert.Contains(t, err.Error(), `reference does not exist: ${resources.unknown.other_unknown.id}`) assert.ErrorContains(t, diags.Error(), `reference does not exist: ${resources.unknown.other_unknown.id}`)
} }

View File

@ -6,6 +6,7 @@ import (
"slices" "slices"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
tfjson "github.com/hashicorp/terraform-json" tfjson "github.com/hashicorp/terraform-json"
) )
@ -22,31 +23,31 @@ func (l *load) Name() string {
return "terraform.Load" return "terraform.Load"
} }
func (l *load) Apply(ctx context.Context, b *bundle.Bundle) error { func (l *load) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
tf := b.Terraform tf := b.Terraform
if tf == nil { if tf == nil {
return fmt.Errorf("terraform not initialized") return diag.Errorf("terraform not initialized")
} }
err := tf.Init(ctx, tfexec.Upgrade(true)) err := tf.Init(ctx, tfexec.Upgrade(true))
if err != nil { if err != nil {
return fmt.Errorf("terraform init: %w", err) return diag.Errorf("terraform init: %v", err)
} }
state, err := b.Terraform.Show(ctx) state, err := b.Terraform.Show(ctx)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
err = l.validateState(state) err = l.validateState(state)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Merge state into configuration. // Merge state into configuration.
err = TerraformToBundle(state, &b.Config) err = TerraformToBundle(state, &b.Config)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return nil return nil

View File

@ -32,10 +32,10 @@ func TestLoadWithNoState(t *testing.T) {
t.Setenv("DATABRICKS_TOKEN", "foobar") t.Setenv("DATABRICKS_TOKEN", "foobar")
b.WorkspaceClient() b.WorkspaceClient()
err = bundle.Apply(context.Background(), b, bundle.Seq( diags := bundle.Apply(context.Background(), b, bundle.Seq(
Initialize(), Initialize(),
Load(ErrorOnEmptyState), Load(ErrorOnEmptyState),
)) ))
require.ErrorContains(t, err, "Did you forget to run 'databricks bundle deploy'") require.ErrorContains(t, diags.Error(), "Did you forget to run 'databricks bundle deploy'")
} }

View File

@ -7,6 +7,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/terraform" "github.com/databricks/cli/libs/terraform"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
) )
@ -26,30 +27,30 @@ func (p *plan) Name() string {
return "terraform.Plan" return "terraform.Plan"
} }
func (p *plan) Apply(ctx context.Context, b *bundle.Bundle) error { func (p *plan) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
tf := b.Terraform tf := b.Terraform
if tf == nil { if tf == nil {
return fmt.Errorf("terraform not initialized") return diag.Errorf("terraform not initialized")
} }
cmdio.LogString(ctx, "Starting plan computation") cmdio.LogString(ctx, "Starting plan computation")
err := tf.Init(ctx, tfexec.Upgrade(true)) err := tf.Init(ctx, tfexec.Upgrade(true))
if err != nil { if err != nil {
return fmt.Errorf("terraform init: %w", err) return diag.Errorf("terraform init: %v", err)
} }
// Persist computed plan // Persist computed plan
tfDir, err := Dir(ctx, b) tfDir, err := Dir(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
planPath := filepath.Join(tfDir, "plan") planPath := filepath.Join(tfDir, "plan")
destroy := p.goal == PlanDestroy destroy := p.goal == PlanDestroy
notEmpty, err := tf.Plan(ctx, tfexec.Destroy(destroy), tfexec.Out(planPath)) notEmpty, err := tf.Plan(ctx, tfexec.Destroy(destroy), tfexec.Out(planPath))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Set plan in main bundle struct for downstream mutators // Set plan in main bundle struct for downstream mutators

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy" "github.com/databricks/cli/bundle/deploy"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -45,15 +46,15 @@ func (l *statePull) remoteState(ctx context.Context, f filer.Filer) (*bytes.Buff
return &buf, nil return &buf, nil
} }
func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
f, err := l.filerFactory(b) f, err := l.filerFactory(b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
dir, err := Dir(ctx, b) dir, err := Dir(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Download state file from filer to local cache directory. // Download state file from filer to local cache directory.
@ -61,7 +62,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error {
remote, err := l.remoteState(ctx, f) remote, err := l.remoteState(ctx, f)
if err != nil { if err != nil {
log.Infof(ctx, "Unable to open remote state file: %s", err) log.Infof(ctx, "Unable to open remote state file: %s", err)
return err return diag.FromErr(err)
} }
if remote == nil { if remote == nil {
log.Infof(ctx, "Remote state file does not exist") log.Infof(ctx, "Remote state file does not exist")
@ -71,7 +72,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error {
// Expect the state file to live under dir. // Expect the state file to live under dir.
local, err := os.OpenFile(filepath.Join(dir, TerraformStateFileName), os.O_CREATE|os.O_RDWR, 0600) local, err := os.OpenFile(filepath.Join(dir, TerraformStateFileName), os.O_CREATE|os.O_RDWR, 0600)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
defer local.Close() defer local.Close()
@ -88,7 +89,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error {
log.Infof(ctx, "Writing remote state file to local cache directory") log.Infof(ctx, "Writing remote state file to local cache directory")
_, err = io.Copy(local, bytes.NewReader(remote.Bytes())) _, err = io.Copy(local, bytes.NewReader(remote.Bytes()))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return nil return nil

View File

@ -15,12 +15,11 @@ import (
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock" "github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
) )
func mockStateFilerForPull(t *testing.T, contents map[string]int, merr error) filer.Filer { func mockStateFilerForPull(t *testing.T, contents map[string]int, merr error) filer.Filer {
buf, err := json.Marshal(contents) buf, err := json.Marshal(contents)
require.NoError(t, err) assert.NoError(t, err)
f := mockfiler.NewMockFiler(t) f := mockfiler.NewMockFiler(t)
f. f.
@ -49,11 +48,11 @@ func TestStatePullLocalMissingRemoteMissing(t *testing.T) {
ctx := context.Background() ctx := context.Background()
b := statePullTestBundle(t) b := statePullTestBundle(t)
err := bundle.Apply(ctx, b, m) diags := bundle.Apply(ctx, b, m)
assert.NoError(t, err) assert.NoError(t, diags.Error())
// Confirm that no local state file has been written. // Confirm that no local state file has been written.
_, err = os.Stat(localStateFile(t, ctx, b)) _, err := os.Stat(localStateFile(t, ctx, b))
assert.ErrorIs(t, err, fs.ErrNotExist) assert.ErrorIs(t, err, fs.ErrNotExist)
} }
@ -64,8 +63,8 @@ func TestStatePullLocalMissingRemotePresent(t *testing.T) {
ctx := context.Background() ctx := context.Background()
b := statePullTestBundle(t) b := statePullTestBundle(t)
err := bundle.Apply(ctx, b, m) diags := bundle.Apply(ctx, b, m)
assert.NoError(t, err) assert.NoError(t, diags.Error())
// Confirm that the local state file has been updated. // Confirm that the local state file has been updated.
localState := readLocalState(t, ctx, b) localState := readLocalState(t, ctx, b)
@ -82,8 +81,8 @@ func TestStatePullLocalStale(t *testing.T) {
// Write a stale local state file. // Write a stale local state file.
writeLocalState(t, ctx, b, map[string]int{"serial": 4}) writeLocalState(t, ctx, b, map[string]int{"serial": 4})
err := bundle.Apply(ctx, b, m) diags := bundle.Apply(ctx, b, m)
assert.NoError(t, err) assert.NoError(t, diags.Error())
// Confirm that the local state file has been updated. // Confirm that the local state file has been updated.
localState := readLocalState(t, ctx, b) localState := readLocalState(t, ctx, b)
@ -100,8 +99,8 @@ func TestStatePullLocalEqual(t *testing.T) {
// Write a local state file with the same serial as the remote. // Write a local state file with the same serial as the remote.
writeLocalState(t, ctx, b, map[string]int{"serial": 5}) writeLocalState(t, ctx, b, map[string]int{"serial": 5})
err := bundle.Apply(ctx, b, m) diags := bundle.Apply(ctx, b, m)
assert.NoError(t, err) assert.NoError(t, diags.Error())
// Confirm that the local state file has not been updated. // Confirm that the local state file has not been updated.
localState := readLocalState(t, ctx, b) localState := readLocalState(t, ctx, b)
@ -118,8 +117,8 @@ func TestStatePullLocalNewer(t *testing.T) {
// Write a local state file with a newer serial as the remote. // Write a local state file with a newer serial as the remote.
writeLocalState(t, ctx, b, map[string]int{"serial": 6}) writeLocalState(t, ctx, b, map[string]int{"serial": 6})
err := bundle.Apply(ctx, b, m) diags := bundle.Apply(ctx, b, m)
assert.NoError(t, err) assert.NoError(t, diags.Error())
// Confirm that the local state file has not been updated. // Confirm that the local state file has not been updated.
localState := readLocalState(t, ctx, b) localState := readLocalState(t, ctx, b)

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy" "github.com/databricks/cli/bundle/deploy"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -20,21 +21,21 @@ func (l *statePush) Name() string {
return "terraform:state-push" return "terraform:state-push"
} }
func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
f, err := l.filerFactory(b) f, err := l.filerFactory(b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
dir, err := Dir(ctx, b) dir, err := Dir(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
// Expect the state file to live under dir. // Expect the state file to live under dir.
local, err := os.Open(filepath.Join(dir, TerraformStateFileName)) local, err := os.Open(filepath.Join(dir, TerraformStateFileName))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
defer local.Close() defer local.Close()
@ -43,7 +44,7 @@ func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) error {
log.Infof(ctx, "Writing local state file to remote state directory") log.Infof(ctx, "Writing local state file to remote state directory")
err = f.Write(ctx, TerraformStateFileName, local, filer.CreateParentDirectories, filer.OverwriteIfExists) err = f.Write(ctx, TerraformStateFileName, local, filer.CreateParentDirectories, filer.OverwriteIfExists)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return nil return nil

View File

@ -56,6 +56,6 @@ func TestStatePush(t *testing.T) {
// Write a stale local state file. // Write a stale local state file.
writeLocalState(t, ctx, b, map[string]int{"serial": 4}) writeLocalState(t, ctx, b, map[string]int{"serial": 4})
err := bundle.Apply(ctx, b, m) diags := bundle.Apply(ctx, b, m)
assert.NoError(t, err) assert.NoError(t, diags.Error())
} }

View File

@ -5,6 +5,7 @@ import (
"fmt" "fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
) )
@ -13,20 +14,20 @@ type unbind struct {
resourceKey string resourceKey string
} }
func (m *unbind) Apply(ctx context.Context, b *bundle.Bundle) error { func (m *unbind) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
tf := b.Terraform tf := b.Terraform
if tf == nil { if tf == nil {
return fmt.Errorf("terraform not initialized") return diag.Errorf("terraform not initialized")
} }
err := tf.Init(ctx, tfexec.Upgrade(true)) err := tf.Init(ctx, tfexec.Upgrade(true))
if err != nil { if err != nil {
return fmt.Errorf("terraform init: %w", err) return diag.Errorf("terraform init: %v", err)
} }
err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.resourceType, m.resourceKey)) err = tf.StateRm(ctx, fmt.Sprintf("%s.%s", m.resourceType, m.resourceKey))
if err != nil { if err != nil {
return fmt.Errorf("terraform state rm: %w", err) return diag.Errorf("terraform state rm: %v", err)
} }
return nil return nil

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/internal/tf/schema" "github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
) )
@ -17,10 +18,10 @@ func (w *write) Name() string {
return "terraform.Write" return "terraform.Write"
} }
func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error { func (w *write) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
dir, err := Dir(ctx, b) dir, err := Dir(ctx, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
var root *schema.Root var root *schema.Root
@ -29,12 +30,12 @@ func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error {
return v, err return v, err
}) })
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
f, err := os.Create(filepath.Join(dir, TerraformConfigFileName)) f, err := os.Create(filepath.Join(dir, TerraformConfigFileName))
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
defer f.Close() defer f.Close()
@ -43,7 +44,7 @@ func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error {
enc.SetIndent("", " ") enc.SetIndent("", " ")
err = enc.Encode(root) err = enc.Encode(root)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
return nil return nil

View File

@ -2,9 +2,9 @@ package libraries
import ( import (
"context" "context"
"fmt"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
) )
@ -19,17 +19,17 @@ func (a *match) Name() string {
return "libraries.MatchWithArtifacts" return "libraries.MatchWithArtifacts"
} }
func (a *match) Apply(ctx context.Context, b *bundle.Bundle) error { func (a *match) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
tasks := findAllTasks(b) tasks := findAllTasks(b)
for _, task := range tasks { for _, task := range tasks {
if isMissingRequiredLibraries(task) { if isMissingRequiredLibraries(task) {
return fmt.Errorf("task '%s' is missing required libraries. Please include your package code in task libraries block", task.TaskKey) return diag.Errorf("task '%s' is missing required libraries. Please include your package code in task libraries block", task.TaskKey)
} }
for j := range task.Libraries { for j := range task.Libraries {
lib := &task.Libraries[j] lib := &task.Libraries[j]
_, err := findArtifactFiles(ctx, lib, b) _, err := findArtifactFiles(ctx, lib, b)
if err != nil { if err != nil {
return err return diag.FromErr(err)
} }
} }
} }

View File

@ -4,6 +4,7 @@ import (
"context" "context"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
) )
type LogStringMutator struct { type LogStringMutator struct {
@ -20,7 +21,7 @@ func LogString(message string) Mutator {
} }
} }
func (m *LogStringMutator) Apply(ctx context.Context, b *Bundle) error { func (m *LogStringMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
cmdio.LogString(ctx, m.message) cmdio.LogString(ctx, m.message)
return nil return nil

View File

@ -3,6 +3,7 @@ package bundle
import ( import (
"context" "context"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
) )
@ -13,10 +14,10 @@ type Mutator interface {
Name() string Name() string
// Apply mutates the specified bundle object. // Apply mutates the specified bundle object.
Apply(context.Context, *Bundle) error Apply(context.Context, *Bundle) diag.Diagnostics
} }
func Apply(ctx context.Context, b *Bundle, m Mutator) error { func Apply(ctx context.Context, b *Bundle, m Mutator) diag.Diagnostics {
ctx = log.NewContext(ctx, log.GetLogger(ctx).With("mutator", m.Name())) ctx = log.NewContext(ctx, log.GetLogger(ctx).With("mutator", m.Name()))
log.Debugf(ctx, "Apply") log.Debugf(ctx, "Apply")
@ -24,7 +25,7 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error {
err := b.Config.MarkMutatorEntry(ctx) err := b.Config.MarkMutatorEntry(ctx)
if err != nil { if err != nil {
log.Errorf(ctx, "entry error: %s", err) log.Errorf(ctx, "entry error: %s", err)
return err return diag.Errorf("entry error: %s", err)
} }
defer func() { defer func() {
@ -34,28 +35,32 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error {
} }
}() }()
err = m.Apply(ctx, b) diags := m.Apply(ctx, b)
if err != nil {
// Log error in diagnostics if any.
// Note: errors should be logged when constructing them
// such that they are not logged multiple times.
// If this is done, we can omit this block.
if err := diags.Error(); err != nil {
log.Errorf(ctx, "Error: %s", err) log.Errorf(ctx, "Error: %s", err)
return err
} }
return nil return diags
} }
type funcMutator struct { type funcMutator struct {
fn func(context.Context, *Bundle) error fn func(context.Context, *Bundle) diag.Diagnostics
} }
func (m funcMutator) Name() string { func (m funcMutator) Name() string {
return "<func>" return "<func>"
} }
func (m funcMutator) Apply(ctx context.Context, b *Bundle) error { func (m funcMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
return m.fn(ctx, b) return m.fn(ctx, b)
} }
// ApplyFunc applies an inline-specified function mutator. // ApplyFunc applies an inline-specified function mutator.
func ApplyFunc(ctx context.Context, b *Bundle, fn func(context.Context, *Bundle) error) error { func ApplyFunc(ctx context.Context, b *Bundle, fn func(context.Context, *Bundle) diag.Diagnostics) diag.Diagnostics {
return Apply(ctx, b, funcMutator{fn}) return Apply(ctx, b, funcMutator{fn})
} }

Some files were not shown because too many files have changed in this diff Show More