Remove bundle.{Seq,If,Defer,newPhase,logString}, switch to regular functions (#2390)

## Changes
- Instead of constructing chains of mutators and then executing them,
execute them directly.
- Remove functionality related to chain-building: Seq, If, Defer,
newPhase, logString.
- Phases become functions that apply the changes directly rather than
constructing mutator chains to be called later.
- Add a helper ApplySeq to call multiple mutators, and use it where
Apply+Seq was used before.
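For reference, the new helper is a thin loop over Apply that stops at the
first error. The snippet below is condensed from the diff further down;
it lives in the existing bundle package, so Bundle, Mutator and diag come
from there. A typical call-site change is shown in the trailing comment.

```go
// ApplySeq applies mutators in order and stops at the first error.
func ApplySeq(ctx context.Context, b *Bundle, mutators ...Mutator) diag.Diagnostics {
	diags := diag.Diagnostics{}
	for _, m := range mutators {
		diags = diags.Extend(Apply(ctx, b, m))
		if diags.HasError() {
			return diags
		}
	}
	return diags
}

// Typical call-site change:
//   before: diags := bundle.Apply(ctx, b, bundle.Seq(m1, m2))
//   after:  diags := bundle.ApplySeq(ctx, b, m1, m2)
```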

This is intended to be a refactoring without functional changes, but
there are a few behaviour changes:
- Since defer() is now used to call unlock instead of bundle.Defer(),
unlocking happens even in the case of panics (see the sketch after this
list).
- In --debug, the phase names are still logged once before the start of
the phase, but each log entry no longer has 'seq' or the phase name in it.
- The message "Deployment complete!" was printed even if the
terraform.Apply() mutator returned an error. It is no longer printed in
that case.
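To illustrate the first point: phases now release the deployment lock in
a Go defer, which runs on normal return and on panic, unlike the old
bundle.Defer() chain. A condensed sketch based on the deploy phase in the
diff below (the remaining deploy steps and their error handling are
elided):

```go
func Deploy(ctx context.Context, b *bundle.Bundle, outputHandler sync.OutputHandler) (diags diag.Diagnostics) {
	log.Info(ctx, "Phase: deploy")

	diags = bundle.ApplySeq(ctx, b,
		scripts.Execute(config.ScriptPreDeploy),
		lock.Acquire(),
	)
	if diags.HasError() {
		// Lock was not acquired; nothing to release.
		return diags
	}

	// Release the lock on the way out, even if a later step panics.
	defer func() {
		diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalDeploy)))
	}()

	// ... remaining deploy steps via bundle.ApplySeq(ctx, b, ...) ...
	return diags
}
```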

## Motivation

The use of chains was necessary when mutators returned a list of other
mutators instead of calling them directly. But that behaviour has since
been removed, so the chain machinery no longer has any purpose.

Using direct functions simplifies the logic and makes bugs more
apparent and easier to fix.

Other improvements that this unlocks:
- Simpler stacktraces/debugging (breakpoints).
- Use of functions with a narrowly scoped API: instead of mutators that
receive the full bundle config, we can use focused functions that only
deal with the sections they care about, e.g.
prepareGitSettings(currentGitSection) -> updatedGitSection. This makes
the data flow more apparent (see the sketch after this list).
- Parallel computation across mutators (within a phase): launch
goroutines that fetch data from APIs at the beginning and process the
results once they are ready.
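A purely hypothetical sketch of the narrowly scoped style described
above; GitSection and prepareGitSettings are illustrative names from the
motivation, not code in this PR:

```go
// Hypothetical: a focused function that only sees the section it cares
// about and returns the updated value, instead of a mutator receiving
// the full bundle config. GitSection is an illustrative stand-in type.
type GitSection struct {
	Branch    string
	OriginURL string
}

func prepareGitSettings(git GitSection) (GitSection, error) {
	// e.g. fill in a default branch if none is configured
	if git.Branch == "" {
		git.Branch = "main"
	}
	return git, nil
}
```

Because phases are now plain functions, the same shape also makes it
natural to start API-fetching goroutines at the top of a phase and
consume the results where they are needed.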

## Tests
Existing tests.
Denis Bilenko committed e2db0cd0e2 (parent 767c0a9ddb) via GitHub on 2025-02-27 12:41:58 +01:00
52 changed files with 545 additions and 869 deletions

View File

@ -1,81 +1,76 @@
10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug" 10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345 10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345
10:07:59 Debug: Apply pid=12345 mutator=load 10:07:59 Info: Phase: load pid=12345
10:07:59 Info: Phase: load pid=12345 mutator=load 10:07:59 Debug: Apply pid=12345 mutator=EntryPoint
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq 10:07:59 Debug: Apply pid=12345 mutator=scripts.preinit
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EntryPoint 10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=scripts.preinit
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=scripts.preinit 10:07:59 Debug: Apply pid=12345 mutator=ProcessRootIncludes
10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=load mutator=seq mutator=scripts.preinit 10:07:59 Debug: Apply pid=12345 mutator=VerifyCliVersion
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes 10:07:59 Debug: Apply pid=12345 mutator=EnvironmentsToTargets
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes mutator=seq 10:07:59 Debug: Apply pid=12345 mutator=ComputeIdToClusterId
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=VerifyCliVersion 10:07:59 Debug: Apply pid=12345 mutator=InitializeVariables
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EnvironmentsToTargets 10:07:59 Debug: Apply pid=12345 mutator=DefineDefaultTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ComputeIdToClusterId 10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=InitializeVariables 10:07:59 Debug: Apply pid=12345 mutator=validate:unique_resource_keys
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=DefineDefaultTarget(default) 10:07:59 Debug: Apply pid=12345 mutator=SelectDefaultTarget
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=PythonMutator(load) 10:07:59 Debug: Apply pid=12345 mutator=SelectDefaultTarget mutator=SelectTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=validate:unique_resource_keys
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget mutator=SelectTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=<func> 10:07:59 Debug: Apply pid=12345 mutator=<func>
10:07:59 Debug: Apply pid=12345 mutator=initialize 10:07:59 Info: Phase: initialize pid=12345
10:07:59 Info: Phase: initialize pid=12345 mutator=initialize 10:07:59 Debug: Apply pid=12345 mutator=validate:AllResourcesHaveValues
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq 10:07:59 Debug: Apply pid=12345 mutator=RewriteSyncPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=validate:AllResourcesHaveValues 10:07:59 Debug: Apply pid=12345 mutator=SyncDefaultPath
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteSyncPaths 10:07:59 Debug: Apply pid=12345 mutator=SyncInferRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncDefaultPath 10:07:59 Debug: Apply pid=12345 mutator=PopulateCurrentUser
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncInferRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser
10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me 10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me
< HTTP/1.1 200 OK < HTTP/1.1 200 OK
< { < {
< "id": "[USERID]", < "id": "[USERID]",
< "userName": "[USERNAME]" < "userName": "[USERNAME]"
< } pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser sdk=true < } pid=12345 mutator=PopulateCurrentUser sdk=true
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=LoadGitDetails 10:07:59 Debug: Apply pid=12345 mutator=LoadGitDetails
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplySourceLinkedDeploymentPreset 10:07:59 Debug: Apply pid=12345 mutator=ApplySourceLinkedDeploymentPreset
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefineDefaultWorkspaceRoot 10:07:59 Debug: Apply pid=12345 mutator=DefineDefaultWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandWorkspaceRoot 10:07:59 Debug: Apply pid=12345 mutator=ExpandWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultWorkspacePaths 10:07:59 Debug: Apply pid=12345 mutator=DefaultWorkspacePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PrependWorkspacePrefix 10:07:59 Debug: Apply pid=12345 mutator=PrependWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteWorkspacePrefix 10:07:59 Debug: Apply pid=12345 mutator=RewriteWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetVariables 10:07:59 Debug: Apply pid=12345 mutator=SetVariables
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(init) 10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(init)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(load_resources) 10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load_resources)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(apply_mutators) 10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(apply_mutators)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences 10:07:59 Debug: Apply pid=12345 mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveResourceReferences 10:07:59 Debug: Apply pid=12345 mutator=ResolveResourceReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences 10:07:59 Debug: Apply pid=12345 mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobClusters 10:07:59 Debug: Apply pid=12345 mutator=MergeJobClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobParameters 10:07:59 Debug: Apply pid=12345 mutator=MergeJobParameters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobTasks 10:07:59 Debug: Apply pid=12345 mutator=MergeJobTasks
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergePipelineClusters 10:07:59 Debug: Apply pid=12345 mutator=MergePipelineClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeApps 10:07:59 Debug: Apply pid=12345 mutator=MergeApps
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CaptureSchemaDependency 10:07:59 Debug: Apply pid=12345 mutator=CaptureSchemaDependency
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CheckPermissions 10:07:59 Debug: Apply pid=12345 mutator=CheckPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetRunAs 10:07:59 Debug: Apply pid=12345 mutator=SetRunAs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=OverrideCompute 10:07:59 Debug: Apply pid=12345 mutator=OverrideCompute
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureDashboardDefaults 10:07:59 Debug: Apply pid=12345 mutator=ConfigureDashboardDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureVolumeDefaults 10:07:59 Debug: Apply pid=12345 mutator=ConfigureVolumeDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ProcessTargetMode 10:07:59 Debug: Apply pid=12345 mutator=ProcessTargetMode
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyPresets 10:07:59 Debug: Apply pid=12345 mutator=ApplyPresets
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultQueueing 10:07:59 Debug: Apply pid=12345 mutator=DefaultQueueing
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandPipelineGlobPaths 10:07:59 Debug: Apply pid=12345 mutator=ExpandPipelineGlobPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureWSFS 10:07:59 Debug: Apply pid=12345 mutator=ConfigureWSFS
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=TranslatePaths 10:07:59 Debug: Apply pid=12345 mutator=TranslatePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonWrapperWarning 10:07:59 Debug: Apply pid=12345 mutator=PythonWrapperWarning
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=apps.Validate 10:07:59 Debug: Apply pid=12345 mutator=apps.Validate
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ValidateSharedRootPermissions 10:07:59 Debug: Apply pid=12345 mutator=ValidateSharedRootPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyBundlePermissions 10:07:59 Debug: Apply pid=12345 mutator=ApplyBundlePermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=FilterCurrentUserFromPermissions 10:07:59 Debug: Apply pid=12345 mutator=FilterCurrentUserFromPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotateJobs 10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotateJobs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotatePipelines 10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotatePipelines
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize 10:07:59 Debug: Apply pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize 10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize 10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize 10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit 10:07:59 Debug: Apply pid=12345 mutator=scripts.postinit
10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit 10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate 10:07:59 Debug: Apply pid=12345 mutator=validate
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files 10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found < HTTP/1.1 404 Not Found

View File

@ -70,6 +70,6 @@ env:
bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(root, "databricks.yml")}}) bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(root, "databricks.yml")}})
diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), &u)) diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), &u)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
} }

View File

@ -51,7 +51,7 @@ func TestAppsValidate(t *testing.T) {
bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate())) diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), Validate())
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.Equal(t, "app.yml detected", diags[0].Summary) require.Equal(t, "app.yml detected", diags[0].Summary)
require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource") require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource")
@ -90,7 +90,7 @@ func TestAppsValidateSameSourcePath(t *testing.T) {
bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate())) diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), Validate())
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.Equal(t, "Duplicate app source code path", diags[0].Summary) require.Equal(t, "Duplicate app source code path", diags[0].Summary)
require.Contains(t, diags[0].Detail, "has the same source code path as app resource") require.Contains(t, diags[0].Detail, "has the same source code path as app resource")

View File

@ -38,5 +38,5 @@ func (m *all) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
} }
} }
return bundle.Apply(ctx, b, bundle.Seq(out...)) return bundle.ApplySeq(ctx, b, out...)
} }

View File

@ -53,5 +53,5 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// if we do it before, any files that are generated by build command will // if we do it before, any files that are generated by build command will
// not be included into artifact.Files and thus will not be uploaded. // not be included into artifact.Files and thus will not be uploaded.
mutators = append(mutators, &expandGlobs{name: m.name}) mutators = append(mutators, &expandGlobs{name: m.name})
return bundle.Apply(ctx, b, bundle.Seq(mutators...)) return bundle.ApplySeq(ctx, b, mutators...)
} }

View File

@ -39,11 +39,11 @@ func TestExpandGlobs_Nominal(t *testing.T) {
bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
ctx := context.Background() ctx := context.Background()
diags := bundle.Apply(ctx, b, bundle.Seq( diags := bundle.ApplySeq(ctx, b,
// Run prepare first to make paths absolute. // Run prepare first to make paths absolute.
&prepare{"test"}, &prepare{"test"},
&expandGlobs{"test"}, &expandGlobs{"test"},
)) )
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Assert that the expanded paths are correct. // Assert that the expanded paths are correct.
@ -80,11 +80,11 @@ func TestExpandGlobs_InvalidPattern(t *testing.T) {
bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
ctx := context.Background() ctx := context.Background()
diags := bundle.Apply(ctx, b, bundle.Seq( diags := bundle.ApplySeq(ctx, b,
// Run prepare first to make paths absolute. // Run prepare first to make paths absolute.
&prepare{"test"}, &prepare{"test"},
&expandGlobs{"test"}, &expandGlobs{"test"},
)) )
assert.Len(t, diags, 4) assert.Len(t, diags, 4)
assert.Equal(t, filepath.Clean("a[.txt")+": syntax error in pattern", diags[0].Summary) assert.Equal(t, filepath.Clean("a[.txt")+": syntax error in pattern", diags[0].Summary)
@ -128,11 +128,11 @@ func TestExpandGlobs_NoMatches(t *testing.T) {
bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
ctx := context.Background() ctx := context.Background()
diags := bundle.Apply(ctx, b, bundle.Seq( diags := bundle.ApplySeq(ctx, b,
// Run prepare first to make paths absolute. // Run prepare first to make paths absolute.
&prepare{"test"}, &prepare{"test"},
&expandGlobs{"test"}, &expandGlobs{"test"},
)) )
assert.Len(t, diags, 2) assert.Len(t, diags, 2)
assert.Equal(t, "c*.txt: no matching files", diags[0].Summary) assert.Equal(t, "c*.txt: no matching files", diags[0].Summary)

View File

@ -98,5 +98,5 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
// Swap out the original includes list with the expanded globs. // Swap out the original includes list with the expanded globs.
b.Config.Include = files b.Config.Include = files
return bundle.Apply(ctx, b, bundle.Seq(out...)) return bundle.ApplySeq(ctx, b, out...)
} }

View File

@ -1,16 +1,19 @@
package mutator package mutator
import ( import (
"context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/loader" "github.com/databricks/cli/bundle/config/loader"
pythonmutator "github.com/databricks/cli/bundle/config/mutator/python" pythonmutator "github.com/databricks/cli/bundle/config/mutator/python"
"github.com/databricks/cli/bundle/config/validate" "github.com/databricks/cli/bundle/config/validate"
"github.com/databricks/cli/bundle/scripts" "github.com/databricks/cli/bundle/scripts"
"github.com/databricks/cli/libs/diag"
) )
func DefaultMutators() []bundle.Mutator { func DefaultMutators(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return []bundle.Mutator{ return bundle.ApplySeq(ctx, b,
loader.EntryPoint(), loader.EntryPoint(),
// Execute preinit script before processing includes. // Execute preinit script before processing includes.
@ -31,5 +34,5 @@ func DefaultMutators() []bundle.Mutator {
// Note: This mutator must run before the target overrides are merged. // Note: This mutator must run before the target overrides are merged.
// See the mutator for more details. // See the mutator for more details.
validate.UniqueResourceKeys(), validate.UniqueResourceKeys(),
} )
} }

View File

@ -80,7 +80,7 @@ func TestPrependWorkspaceForDefaultConfig(t *testing.T) {
}, },
}, },
} }
diags := bundle.Apply(context.Background(), b, bundle.Seq(DefineDefaultWorkspaceRoot(), ExpandWorkspaceRoot(), DefineDefaultWorkspacePaths(), PrependWorkspacePrefix())) diags := bundle.ApplySeq(context.Background(), b, DefineDefaultWorkspaceRoot(), ExpandWorkspaceRoot(), DefineDefaultWorkspacePaths(), PrependWorkspacePrefix())
require.Empty(t, diags) require.Empty(t, diags)
require.Equal(t, "/Workspace/Users/jane@doe.com/.bundle/test/dev", b.Config.Workspace.RootPath) require.Equal(t, "/Workspace/Users/jane@doe.com/.bundle/test/dev", b.Config.Workspace.RootPath)
require.Equal(t, "/Workspace/Users/jane@doe.com/.bundle/test/dev/artifacts", b.Config.Workspace.ArtifactPath) require.Equal(t, "/Workspace/Users/jane@doe.com/.bundle/test/dev/artifacts", b.Config.Workspace.ArtifactPath)

View File

@ -163,8 +163,7 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
func TestProcessTargetModeDevelopment(t *testing.T) { func TestProcessTargetModeDevelopment(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Job 1 // Job 1
@ -224,8 +223,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) {
}) })
b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Assert that tag normalization took place. // Assert that tag normalization took place.
@ -239,8 +237,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAzure(t *testing.T) {
}) })
b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Assert that tag normalization took place (Azure allows more characters than AWS). // Assert that tag normalization took place (Azure allows more characters than AWS).
@ -254,8 +251,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForGcp(t *testing.T) {
}) })
b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Assert that tag normalization took place. // Assert that tag normalization took place.
@ -311,8 +307,7 @@ func TestValidateDevelopmentMode(t *testing.T) {
func TestProcessTargetModeDefault(t *testing.T) { func TestProcessTargetModeDefault(t *testing.T) {
b := mockBundle("") b := mockBundle("")
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name)
assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
@ -429,8 +424,7 @@ func TestAllNonUcResourcesAreRenamed(t *testing.T) {
reflect.TypeOf(&resources.Volume{}), reflect.TypeOf(&resources.Volume{}),
} }
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
resources := reflect.ValueOf(b.Config.Resources) resources := reflect.ValueOf(b.Config.Resources)
@ -484,8 +478,7 @@ func TestPrefixAlreadySet(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
b.Config.Presets.NamePrefix = "custom_lennart_deploy_" b.Config.Presets.NamePrefix = "custom_lennart_deploy_"
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.Equal(t, "custom_lennart_deploy_job1", b.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "custom_lennart_deploy_job1", b.Config.Resources.Jobs["job1"].Name)
@ -498,8 +491,7 @@ func TestTagsAlreadySet(t *testing.T) {
"dev": "foo", "dev": "foo",
} }
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.Equal(t, "tag", b.Config.Resources.Jobs["job1"].Tags["custom"]) assert.Equal(t, "tag", b.Config.Resources.Jobs["job1"].Tags["custom"])
@ -510,8 +502,7 @@ func TestTagsNil(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
b.Config.Presets.Tags = nil b.Config.Presets.Tags = nil
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"]) assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"])
@ -521,8 +512,7 @@ func TestTagsEmptySet(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
b.Config.Presets.Tags = map[string]string{} b.Config.Presets.Tags = map[string]string{}
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"]) assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"])
@ -532,8 +522,7 @@ func TestJobsMaxConcurrentRunsAlreadySet(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
b.Config.Presets.JobsMaxConcurrentRuns = 10 b.Config.Presets.JobsMaxConcurrentRuns = 10
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.Equal(t, 10, b.Config.Resources.Jobs["job1"].MaxConcurrentRuns) assert.Equal(t, 10, b.Config.Resources.Jobs["job1"].MaxConcurrentRuns)
@ -543,8 +532,7 @@ func TestJobsMaxConcurrentRunsDisabled(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
b.Config.Presets.JobsMaxConcurrentRuns = 1 b.Config.Presets.JobsMaxConcurrentRuns = 1
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.Equal(t, 1, b.Config.Resources.Jobs["job1"].MaxConcurrentRuns) assert.Equal(t, 1, b.Config.Resources.Jobs["job1"].MaxConcurrentRuns)
@ -554,8 +542,7 @@ func TestTriggerPauseStatusWhenUnpaused(t *testing.T) {
b := mockBundle(config.Development) b := mockBundle(config.Development)
b.Config.Presets.TriggerPauseStatus = config.Unpaused b.Config.Presets.TriggerPauseStatus = config.Unpaused
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.ErrorContains(t, diags.Error(), "target with 'mode: development' cannot set trigger pause status to UNPAUSED by default") require.ErrorContains(t, diags.Error(), "target with 'mode: development' cannot set trigger pause status to UNPAUSED by default")
} }
@ -564,8 +551,7 @@ func TestPipelinesDevelopmentDisabled(t *testing.T) {
notEnabled := false notEnabled := false
b.Config.Presets.PipelinesDevelopment = &notEnabled b.Config.Presets.PipelinesDevelopment = &notEnabled
m := bundle.Seq(ProcessTargetMode(), ApplyPresets()) diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
diags := bundle.Apply(context.Background(), b, m)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.False(t, b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Development) assert.False(t, b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Development)

View File

@ -176,7 +176,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
{ClusterId: "9876-5432-xywz", ClusterName: "some other cluster"}, {ClusterId: "9876-5432-xywz", ClusterName: "some other cluster"},
}, nil) }, nil)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences())) diags := bundle.ApplySeq(context.Background(), b, ResolveVariableReferencesInLookup(), ResolveResourceReferences())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster) require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster)
require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value) require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
@ -203,7 +203,7 @@ func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) {
m := mocks.NewMockWorkspaceClient(t) m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences())) diags := bundle.ApplySeq(context.Background(), b, ResolveVariableReferencesInLookup(), ResolveResourceReferences())
require.ErrorContains(t, diags.Error(), "lookup variables cannot contain references to another lookup variables") require.ErrorContains(t, diags.Error(), "lookup variables cannot contain references to another lookup variables")
} }
@ -229,7 +229,7 @@ func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) {
ctx := context.Background() ctx := context.Background()
ctx = env.Set(ctx, "BUNDLE_VAR_lookup", "1234-5678-abcd") ctx = env.Set(ctx, "BUNDLE_VAR_lookup", "1234-5678-abcd")
diags := bundle.Apply(ctx, b, bundle.Seq(SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences())) diags := bundle.ApplySeq(ctx, b, SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value) require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
} }

View File

@ -828,12 +828,10 @@ func TestTranslatePathWithComplexVariables(t *testing.T) {
}) })
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
diags = bundle.Apply(ctx, b, diags = bundle.ApplySeq(ctx, b,
bundle.Seq(
mutator.SetVariables(), mutator.SetVariables(),
mutator.ResolveVariableReferences("variables"), mutator.ResolveVariableReferences("variables"),
mutator.TranslatePaths(), mutator.TranslatePaths())
))
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
assert.Equal( assert.Equal(

View File

@ -1,30 +0,0 @@
package bundle
import (
"context"
"github.com/databricks/cli/libs/diag"
)
type DeferredMutator struct {
mutator Mutator
finally Mutator
}
func (d *DeferredMutator) Name() string {
return "deferred"
}
func Defer(mutator, finally Mutator) Mutator {
return &DeferredMutator{
mutator: mutator,
finally: finally,
}
}
func (d *DeferredMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
var diags diag.Diagnostics
diags = diags.Extend(Apply(ctx, b, d.mutator))
diags = diags.Extend(Apply(ctx, b, d.finally))
return diags
}

View File

@ -1,114 +0,0 @@
package bundle
import (
"context"
"testing"
"github.com/databricks/cli/libs/diag"
"github.com/stretchr/testify/assert"
)
type mutatorWithError struct {
applyCalled int
errorMsg string
}
func (t *mutatorWithError) Name() string {
return "mutatorWithError"
}
func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) diag.Diagnostics {
t.applyCalled++
return diag.Errorf(t.errorMsg) // nolint:govet
}
func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
m3 := &testMutator{}
cleanup := &testMutator{}
deferredMutator := Defer(Seq(m1, m2, m3), cleanup)
b := &Bundle{}
diags := Apply(context.Background(), b, deferredMutator)
assert.NoError(t, diags.Error())
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled)
assert.Equal(t, 1, m3.applyCalled)
assert.Equal(t, 1, cleanup.applyCalled)
}
func TestDeferredMutatorWhenFirstFails(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
cleanup := &testMutator{}
deferredMutator := Defer(Seq(mErr, m1, m2), cleanup)
b := &Bundle{}
diags := Apply(context.Background(), b, deferredMutator)
assert.ErrorContains(t, diags.Error(), "mutator error occurred")
assert.Equal(t, 1, mErr.applyCalled)
assert.Equal(t, 0, m1.applyCalled)
assert.Equal(t, 0, m2.applyCalled)
assert.Equal(t, 1, cleanup.applyCalled)
}
func TestDeferredMutatorWhenMiddleOneFails(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
cleanup := &testMutator{}
deferredMutator := Defer(Seq(m1, mErr, m2), cleanup)
b := &Bundle{}
diags := Apply(context.Background(), b, deferredMutator)
assert.ErrorContains(t, diags.Error(), "mutator error occurred")
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, mErr.applyCalled)
assert.Equal(t, 0, m2.applyCalled)
assert.Equal(t, 1, cleanup.applyCalled)
}
func TestDeferredMutatorWhenLastOneFails(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
cleanup := &testMutator{}
deferredMutator := Defer(Seq(m1, m2, mErr), cleanup)
b := &Bundle{}
diags := Apply(context.Background(), b, deferredMutator)
assert.ErrorContains(t, diags.Error(), "mutator error occurred")
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled)
assert.Equal(t, 1, mErr.applyCalled)
assert.Equal(t, 1, cleanup.applyCalled)
}
func TestDeferredMutatorCombinesErrorMessages(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
cleanupErr := &mutatorWithError{errorMsg: "cleanup error occurred"}
deferredMutator := Defer(Seq(m1, m2, mErr), cleanupErr)
b := &Bundle{}
diags := Apply(context.Background(), b, deferredMutator)
var errs []string
for _, d := range diags {
errs = append(errs, d.Summary)
}
assert.Contains(t, errs, "mutator error occurred")
assert.Contains(t, errs, "cleanup error occurred")
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled)
assert.Equal(t, 1, mErr.applyCalled)
assert.Equal(t, 1, cleanupErr.applyCalled)
}

View File

@ -32,10 +32,10 @@ func TestLoadWithNoState(t *testing.T) {
t.Setenv("DATABRICKS_TOKEN", "foobar") t.Setenv("DATABRICKS_TOKEN", "foobar")
b.WorkspaceClient() b.WorkspaceClient()
diags := bundle.Apply(context.Background(), b, bundle.Seq( diags := bundle.ApplySeq(context.Background(), b,
Initialize(), Initialize(),
Load(ErrorOnEmptyState), Load(ErrorOnEmptyState),
)) )
require.ErrorContains(t, diags.Error(), "Did you forget to run 'databricks bundle deploy'") require.ErrorContains(t, diags.Error(), "Did you forget to run 'databricks bundle deploy'")
} }

View File

@ -1,40 +0,0 @@
package bundle
import (
"context"
"github.com/databricks/cli/libs/diag"
)
type ifMutator struct {
condition func(context.Context, *Bundle) (bool, error)
onTrueMutator Mutator
onFalseMutator Mutator
}
func If(
condition func(context.Context, *Bundle) (bool, error),
onTrueMutator Mutator,
onFalseMutator Mutator,
) Mutator {
return &ifMutator{
condition, onTrueMutator, onFalseMutator,
}
}
func (m *ifMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
v, err := m.condition(ctx, b)
if err != nil {
return diag.FromErr(err)
}
if v {
return Apply(ctx, b, m.onTrueMutator)
} else {
return Apply(ctx, b, m.onFalseMutator)
}
}
func (m *ifMutator) Name() string {
return "If"
}

View File

@ -1,53 +0,0 @@
package bundle
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
)
func TestIfMutatorTrue(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
ifMutator := If(func(context.Context, *Bundle) (bool, error) {
return true, nil
}, m1, m2)
b := &Bundle{}
diags := Apply(context.Background(), b, ifMutator)
assert.NoError(t, diags.Error())
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 0, m2.applyCalled)
}
func TestIfMutatorFalse(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
ifMutator := If(func(context.Context, *Bundle) (bool, error) {
return false, nil
}, m1, m2)
b := &Bundle{}
diags := Apply(context.Background(), b, ifMutator)
assert.NoError(t, diags.Error())
assert.Equal(t, 0, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled)
}
func TestIfMutatorError(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
ifMutator := If(func(context.Context, *Bundle) (bool, error) {
return true, assert.AnError
}, m1, m2)
b := &Bundle{}
diags := Apply(context.Background(), b, ifMutator)
assert.Error(t, diags.Error())
assert.Equal(t, 0, m1.applyCalled)
assert.Equal(t, 0, m2.applyCalled)
}

View File

@ -93,7 +93,7 @@ func TestArtifactUploadForWorkspace(t *testing.T) {
filer.CreateParentDirectories, filer.CreateParentDirectories,
).Return(nil) ).Return(nil)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler))) diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler))
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Test that libraries path is updated // Test that libraries path is updated
@ -181,7 +181,7 @@ func TestArtifactUploadForVolumes(t *testing.T) {
filer.CreateParentDirectories, filer.CreateParentDirectories,
).Return(nil) ).Return(nil)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler))) diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler))
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Test that libraries path is updated // Test that libraries path is updated
@ -225,7 +225,7 @@ func TestArtifactUploadWithNoLibraryReference(t *testing.T) {
filer.CreateParentDirectories, filer.CreateParentDirectories,
).Return(nil) ).Return(nil)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler))) diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler))
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, "/Workspace/foo/bar/artifacts/.internal/source.whl", b.Config.Artifacts["whl"].Files[0].RemotePath) require.Equal(t, "/Workspace/foo/bar/artifacts/.internal/source.whl", b.Config.Artifacts["whl"].Files[0].RemotePath)
@ -311,7 +311,7 @@ func TestUploadMultipleLibraries(t *testing.T) {
filer.CreateParentDirectories, filer.CreateParentDirectories,
).Return(nil).Once() ).Return(nil).Once()
diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler))) diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler))
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Test that libraries path is updated // Test that libraries path is updated

View File

@ -1,28 +0,0 @@
package bundle
import (
"context"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
)
type LogStringMutator struct {
message string
}
func (d *LogStringMutator) Name() string {
return "log_string"
}
func LogString(message string) Mutator {
return &LogStringMutator{
message: message,
}
}
func (m *LogStringMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
cmdio.LogString(ctx, m.message)
return nil
}

View File

@ -48,6 +48,17 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) diag.Diagnostics {
return diags return diags
} }
func ApplySeq(ctx context.Context, b *Bundle, mutators ...Mutator) diag.Diagnostics {
diags := diag.Diagnostics{}
for _, m := range mutators {
diags = diags.Extend(Apply(ctx, b, m))
if diags.HasError() {
return diags
}
}
return diags
}
type funcMutator struct { type funcMutator struct {
fn func(context.Context, *Bundle) diag.Diagnostics fn func(context.Context, *Bundle) diag.Diagnostics
} }

View File

@ -19,7 +19,7 @@ func (t *testMutator) Name() string {
func (t *testMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { func (t *testMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
t.applyCalled++ t.applyCalled++
return Apply(ctx, b, Seq(t.nestedMutators...)) return ApplySeq(ctx, b, t.nestedMutators...)
} }
func TestMutator(t *testing.T) { func TestMutator(t *testing.T) {

View File

@ -34,7 +34,7 @@ func TestValidateSharedRootPermissionsForShared(t *testing.T) {
m := mocks.NewMockWorkspaceClient(t) m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions())) diags := bundle.Apply(context.Background(), b, ValidateSharedRootPermissions())
require.Empty(t, diags) require.Empty(t, diags)
} }
@ -59,7 +59,7 @@ func TestValidateSharedRootPermissionsForSharedError(t *testing.T) {
m := mocks.NewMockWorkspaceClient(t) m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions())) diags := bundle.Apply(context.Background(), b, ValidateSharedRootPermissions())
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.Equal(t, "the bundle root path /Workspace/Shared/foo/bar is writable by all workspace users", diags[0].Summary) require.Equal(t, "the bundle root path /Workspace/Shared/foo/bar is writable by all workspace users", diags[0].Summary)
require.Equal(t, diag.Warning, diags[0].Severity) require.Equal(t, diag.Warning, diags[0].Severity)

View File

@ -73,7 +73,7 @@ func TestApplyWorkspaceRootPermissions(t *testing.T) {
WorkspaceObjectType: "directories", WorkspaceObjectType: "directories",
}).Return(nil, nil) }).Return(nil, nil)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions(), ApplyWorkspaceRootPermissions())) diags := bundle.ApplySeq(context.Background(), b, ValidateSharedRootPermissions(), ApplyWorkspaceRootPermissions())
require.Empty(t, diags) require.Empty(t, diags)
} }

View File

@ -1,45 +1,57 @@
package phases package phases
import ( import (
"context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/lock"
"github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/deploy/terraform"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log"
) )
func Bind(opts *terraform.BindOptions) bundle.Mutator { func Bind(ctx context.Context, b *bundle.Bundle, opts *terraform.BindOptions) (diags diag.Diagnostics) {
return newPhase( log.Info(ctx, "Phase: bind")
"bind",
[]bundle.Mutator{ diags = bundle.Apply(ctx, b, lock.Acquire())
lock.Acquire(), if diags.HasError() {
bundle.Defer( return diags
bundle.Seq( }
defer func() {
diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalBind)))
}()
diags = diags.Extend(bundle.ApplySeq(ctx, b,
terraform.StatePull(), terraform.StatePull(),
terraform.Interpolate(), terraform.Interpolate(),
terraform.Write(), terraform.Write(),
terraform.Import(opts), terraform.Import(opts),
terraform.StatePush(), terraform.StatePush(),
), ))
lock.Release(lock.GoalBind),
), return diags
},
)
} }
func Unbind(resourceType, resourceKey string) bundle.Mutator { func Unbind(ctx context.Context, b *bundle.Bundle, resourceType, resourceKey string) (diags diag.Diagnostics) {
return newPhase( log.Info(ctx, "Phase: unbind")
"unbind",
[]bundle.Mutator{ diags = bundle.Apply(ctx, b, lock.Acquire())
lock.Acquire(), if diags.HasError() {
bundle.Defer( return diags
bundle.Seq( }
defer func() {
diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalUnbind)))
}()
diags = diags.Extend(bundle.ApplySeq(ctx, b,
terraform.StatePull(), terraform.StatePull(),
terraform.Interpolate(), terraform.Interpolate(),
terraform.Write(), terraform.Write(),
terraform.Unbind(resourceType, resourceKey), terraform.Unbind(resourceType, resourceKey),
terraform.StatePush(), terraform.StatePush(),
), ))
lock.Release(lock.GoalUnbind),
), return diags
},
)
} }

View File

@ -1,19 +1,23 @@
package phases package phases
import ( import (
"context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/artifacts" "github.com/databricks/cli/bundle/artifacts"
"github.com/databricks/cli/bundle/artifacts/whl" "github.com/databricks/cli/bundle/artifacts/whl"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/scripts" "github.com/databricks/cli/bundle/scripts"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log"
) )
// The build phase builds artifacts. // The build phase builds artifacts.
func Build() bundle.Mutator { func Build(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return newPhase( log.Info(ctx, "Phase: build")
"build",
[]bundle.Mutator{ return bundle.ApplySeq(ctx, b,
scripts.Execute(config.ScriptPreBuild), scripts.Execute(config.ScriptPreBuild),
whl.DetectPackage(), whl.DetectPackage(),
artifacts.InferMissingProperties(), artifacts.InferMissingProperties(),
@ -23,6 +27,5 @@ func Build() bundle.Mutator {
mutator.ResolveVariableReferences( mutator.ResolveVariableReferences(
"artifacts", "artifacts",
), ),
},
) )
} }

View File

@ -19,6 +19,8 @@ import (
"github.com/databricks/cli/bundle/scripts" "github.com/databricks/cli/bundle/scripts"
"github.com/databricks/cli/bundle/trampoline" "github.com/databricks/cli/bundle/trampoline"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log"
"github.com/databricks/cli/libs/sync" "github.com/databricks/cli/libs/sync"
terraformlib "github.com/databricks/cli/libs/terraform" terraformlib "github.com/databricks/cli/libs/terraform"
tfjson "github.com/hashicorp/terraform-json" tfjson "github.com/hashicorp/terraform-json"
@ -124,31 +126,52 @@ is removed from the catalog, but the underlying files are not deleted:`
return approved, nil return approved, nil
} }
// The deploy phase deploys artifacts and resources. func deployCore(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
// Core mutators that CRUD resources and modify deployment state. These // Core mutators that CRUD resources and modify deployment state. These
// mutators need informed consent if they are potentially destructive. // mutators need informed consent if they are potentially destructive.
deployCore := bundle.Defer( cmdio.LogString(ctx, "Deploying resources...")
bundle.Seq( diags := bundle.Apply(ctx, b, terraform.Apply())
bundle.LogString("Deploying resources..."),
terraform.Apply(), // following original logic, continuing with sequence below even if terraform had errors
),
bundle.Seq( diags = diags.Extend(bundle.ApplySeq(ctx, b,
terraform.StatePush(), terraform.StatePush(),
terraform.Load(), terraform.Load(),
apps.InterpolateVariables(), apps.InterpolateVariables(),
apps.UploadConfig(), apps.UploadConfig(),
metadata.Compute(), metadata.Compute(),
metadata.Upload(), metadata.Upload(),
bundle.LogString("Deployment complete!"), ))
),
)
deployMutator := bundle.Seq( if !diags.HasError() {
cmdio.LogString(ctx, "Deployment complete!")
}
return diags
}
// The deploy phase deploys artifacts and resources.
func Deploy(ctx context.Context, b *bundle.Bundle, outputHandler sync.OutputHandler) (diags diag.Diagnostics) {
log.Info(ctx, "Phase: deploy")
// Core mutators that CRUD resources and modify deployment state. These
// mutators need informed consent if they are potentially destructive.
diags = bundle.ApplySeq(ctx, b,
scripts.Execute(config.ScriptPreDeploy), scripts.Execute(config.ScriptPreDeploy),
lock.Acquire(), lock.Acquire(),
bundle.Defer( )
bundle.Seq(
if diags.HasError() {
// lock is not acquired here
return diags
}
// lock is acquired here
defer func() {
diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalDeploy)))
}()
diags = bundle.ApplySeq(ctx, b,
terraform.StatePull(), terraform.StatePull(),
terraform.CheckDashboardsModifiedRemotely(), terraform.CheckDashboardsModifiedRemotely(),
deploy.StatePull(), deploy.StatePull(),
@ -165,19 +188,27 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
terraform.Write(), terraform.Write(),
terraform.CheckRunningResource(), terraform.CheckRunningResource(),
terraform.Plan(terraform.PlanGoal("deploy")), terraform.Plan(terraform.PlanGoal("deploy")),
bundle.If(
approvalForDeploy,
deployCore,
bundle.LogString("Deployment cancelled!"),
),
),
lock.Release(lock.GoalDeploy),
),
scripts.Execute(config.ScriptPostDeploy),
) )
return newPhase( if diags.HasError() {
"deploy", return diags
[]bundle.Mutator{deployMutator}, }
)
haveApproval, err := approvalForDeploy(ctx, b)
if err != nil {
diags = diags.Extend(diag.FromErr(err))
return diags
}
if haveApproval {
diags = diags.Extend(deployCore(ctx, b))
} else {
cmdio.LogString(ctx, "Deployment cancelled!")
}
if diags.HasError() {
return diags
}
return diags.Extend(bundle.Apply(ctx, b, scripts.Execute(config.ScriptPostDeploy)))
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/deploy/terraform"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
terraformlib "github.com/databricks/cli/libs/terraform" terraformlib "github.com/databricks/cli/libs/terraform"
@ -77,42 +78,65 @@ func approvalForDestroy(ctx context.Context, b *bundle.Bundle) (bool, error) {
return approved, nil return approved, nil
} }
// The destroy phase deletes artifacts and resources. func destroyCore(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
func Destroy() bundle.Mutator {
// Core destructive mutators for destroy. These require informed user consent. // Core destructive mutators for destroy. These require informed user consent.
destroyCore := bundle.Seq( diags := bundle.ApplySeq(ctx, b,
terraform.Apply(), terraform.Apply(),
files.Delete(), files.Delete(),
bundle.LogString("Destroy complete!"),
) )
destroyMutator := bundle.Seq( if !diags.HasError() {
lock.Acquire(), cmdio.LogString(ctx, "Destroy complete!")
bundle.Defer( }
bundle.Seq(
return diags
}
// The destroy phase deletes artifacts and resources.
func Destroy(ctx context.Context, b *bundle.Bundle) (diags diag.Diagnostics) {
log.Info(ctx, "Phase: destroy")
ok, err := assertRootPathExists(ctx, b)
if err != nil {
return diag.FromErr(err)
}
if !ok {
cmdio.LogString(ctx, "No active deployment found to destroy!")
return diags
}
diags = diags.Extend(bundle.Apply(ctx, b, lock.Acquire()))
if diags.HasError() {
return diags
}
defer func() {
diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalDestroy)))
}()
diags = diags.Extend(bundle.ApplySeq(ctx, b,
terraform.StatePull(), terraform.StatePull(),
terraform.Interpolate(), terraform.Interpolate(),
terraform.Write(), terraform.Write(),
terraform.Plan(terraform.PlanGoal("destroy")), terraform.Plan(terraform.PlanGoal("destroy")),
bundle.If( ))
approvalForDestroy,
destroyCore,
bundle.LogString("Destroy cancelled!"),
),
),
lock.Release(lock.GoalDestroy),
),
)
return newPhase( if diags.HasError() {
"destroy", return diags
[]bundle.Mutator{ }
// Only run deploy mutator if root path exists.
bundle.If( hasApproval, err := approvalForDestroy(ctx, b)
assertRootPathExists, if err != nil {
destroyMutator, diags = diags.Extend(diag.FromErr(err))
bundle.LogString("No active deployment found to destroy!"), return diags
), }
},
) if hasApproval {
diags = diags.Extend(destroyCore(ctx, b))
} else {
cmdio.LogString(ctx, "Destroy cancelled!")
}
return diags
} }

View File

@ -1,6 +1,8 @@
package phases package phases
import ( import (
"context"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/apps" "github.com/databricks/cli/bundle/apps"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
@ -12,15 +14,17 @@ import (
"github.com/databricks/cli/bundle/permissions" "github.com/databricks/cli/bundle/permissions"
"github.com/databricks/cli/bundle/scripts" "github.com/databricks/cli/bundle/scripts"
"github.com/databricks/cli/bundle/trampoline" "github.com/databricks/cli/bundle/trampoline"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log"
) )
// The initialize phase fills in defaults and connects to the workspace. // The initialize phase fills in defaults and connects to the workspace.
// Interpolation of fields referring to the "bundle" and "workspace" keys // Interpolation of fields referring to the "bundle" and "workspace" keys
// happens upon completion of this phase. // happens upon completion of this phase.
func Initialize() bundle.Mutator { func Initialize(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return newPhase( log.Info(ctx, "Phase: initialize")
"initialize",
[]bundle.Mutator{ return bundle.ApplySeq(ctx, b,
validate.AllResourcesHaveValues(), validate.AllResourcesHaveValues(),
// Update all path fields in the sync block to be relative to the bundle root path. // Update all path fields in the sync block to be relative to the bundle root path.
@ -101,6 +105,5 @@ func Initialize() bundle.Mutator {
metadata.AnnotatePipelines(), metadata.AnnotatePipelines(),
terraform.Initialize(), terraform.Initialize(),
scripts.Execute(config.ScriptPostInit), scripts.Execute(config.ScriptPostInit),
},
) )
} }

View File

@@ -1,29 +1,40 @@
 package phases
 import (
+	"context"
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config/mutator"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/log"
 )
 // The load phase loads configuration from disk and performs
 // lightweight preprocessing (anything that can be done without network I/O).
-func Load() bundle.Mutator {
-	return newPhase(
-		"load",
-		mutator.DefaultMutators(),
-	)
+func Load(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	log.Info(ctx, "Phase: load")
+	return mutator.DefaultMutators(ctx, b)
 }
-func LoadDefaultTarget() bundle.Mutator {
-	return newPhase(
-		"load",
-		append(mutator.DefaultMutators(), mutator.SelectDefaultTarget()),
-	)
+func LoadDefaultTarget(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	log.Info(ctx, "Phase: load")
+	diags := mutator.DefaultMutators(ctx, b)
+	if diags.HasError() {
+		return diags
+	}
+	return diags.Extend(bundle.Apply(ctx, b, mutator.SelectDefaultTarget()))
 }
-func LoadNamedTarget(target string) bundle.Mutator {
-	return newPhase(
-		"load",
-		append(mutator.DefaultMutators(), mutator.SelectTarget(target)),
-	)
+func LoadNamedTarget(ctx context.Context, b *bundle.Bundle, target string) diag.Diagnostics {
+	log.Info(ctx, "Phase: load")
+	diags := mutator.DefaultMutators(ctx, b)
+	if diags.HasError() {
+		return diags
+	}
+	return diags.Extend(bundle.Apply(ctx, b, mutator.SelectTarget(target)))
 }

View File

@@ -1,33 +0,0 @@
// Package phases defines build phases as logical groups of [bundle.Mutator] instances.
package phases
import (
"context"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log"
)
// This phase type groups mutators that belong to a lifecycle phase.
// It expands into the specific mutators when applied.
type phase struct {
name string
mutators []bundle.Mutator
}
func newPhase(name string, mutators []bundle.Mutator) bundle.Mutator {
return &phase{
name: name,
mutators: mutators,
}
}
func (p *phase) Name() string {
return p.name
}
func (p *phase) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
log.Infof(ctx, "Phase: %s", p.Name())
return bundle.Apply(ctx, b, bundle.Seq(p.mutators...))
}
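With this phase type removed, a phase is just a function that logs its name and applies its mutators directly. Below is a minimal sketch of that shape; the phase name "Example" and its mutator list are placeholders for illustration, not a real phase from this commit.

package phases

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/log"
)

// Example is a hypothetical phase used only to illustrate the new shape:
// log the phase name, then apply the mutators in order via ApplySeq.
func Example(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	log.Info(ctx, "Phase: example")

	return bundle.ApplySeq(ctx, b,
		mutator.SetVariables(),
		mutator.ResolveVariableReferences("variables"),
	)
}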

View File

@@ -76,10 +76,10 @@ func setupBundle(t *testing.T) (context.Context, *bundle.Bundle, *mocks.MockWork
 	ctx := cmdio.MockDiscard(context.Background())
-	diags := bundle.Apply(ctx, b, bundle.Seq(
+	diags := bundle.ApplySeq(ctx, b,
 		mutator.DefineDefaultWorkspacePaths(),
 		mutator.TranslatePaths(),
-	))
+	)
 	require.Empty(t, diags)
 	return ctx, b, mwc

View File

@@ -1,30 +0,0 @@
package bundle
import (
"context"
"github.com/databricks/cli/libs/diag"
)
type seqMutator struct {
mutators []Mutator
}
func (s *seqMutator) Name() string {
return "seq"
}
func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
var diags diag.Diagnostics
for _, m := range s.mutators {
diags = diags.Extend(Apply(ctx, b, m))
if diags.HasError() {
break
}
}
return diags
}
func Seq(ms ...Mutator) Mutator {
return &seqMutator{mutators: ms}
}
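For reference, the bundle.ApplySeq helper used throughout this commit presumably mirrors the seqMutator loop above as a variadic function. The following is a sketch under that assumption; the actual implementation may differ in details.

package bundle

import (
	"context"

	"github.com/databricks/cli/libs/diag"
)

// ApplySeq applies the given mutators one by one and stops at the first
// error, mirroring the loop in the removed seqMutator.Apply.
func ApplySeq(ctx context.Context, b *Bundle, mutators ...Mutator) diag.Diagnostics {
	var diags diag.Diagnostics
	for _, m := range mutators {
		diags = diags.Extend(Apply(ctx, b, m))
		if diags.HasError() {
			break
		}
	}
	return diags
}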

View File

@@ -1,91 +0,0 @@
package bundle
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
)
func TestSeqMutator(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
m3 := &testMutator{}
seqMutator := Seq(m1, m2, m3)
b := &Bundle{}
diags := Apply(context.Background(), b, seqMutator)
assert.NoError(t, diags.Error())
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled)
assert.Equal(t, 1, m3.applyCalled)
}
func TestSeqWithDeferredMutator(t *testing.T) {
m1 := &testMutator{}
m2 := &testMutator{}
m3 := &testMutator{}
m4 := &testMutator{}
seqMutator := Seq(m1, Defer(m2, m3), m4)
b := &Bundle{}
diags := Apply(context.Background(), b, seqMutator)
assert.NoError(t, diags.Error())
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled)
assert.Equal(t, 1, m3.applyCalled)
assert.Equal(t, 1, m4.applyCalled)
}
func TestSeqWithErrorAndDeferredMutator(t *testing.T) {
errorMut := &mutatorWithError{errorMsg: "error msg"}
m1 := &testMutator{}
m2 := &testMutator{}
m3 := &testMutator{}
seqMutator := Seq(errorMut, Defer(m1, m2), m3)
b := &Bundle{}
diags := Apply(context.Background(), b, seqMutator)
assert.Error(t, diags.Error())
assert.Equal(t, 1, errorMut.applyCalled)
assert.Equal(t, 0, m1.applyCalled)
assert.Equal(t, 0, m2.applyCalled)
assert.Equal(t, 0, m3.applyCalled)
}
func TestSeqWithErrorInsideDeferredMutator(t *testing.T) {
errorMut := &mutatorWithError{errorMsg: "error msg"}
m1 := &testMutator{}
m2 := &testMutator{}
m3 := &testMutator{}
seqMutator := Seq(m1, Defer(errorMut, m2), m3)
b := &Bundle{}
diags := Apply(context.Background(), b, seqMutator)
assert.Error(t, diags.Error())
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, errorMut.applyCalled)
assert.Equal(t, 1, m2.applyCalled)
assert.Equal(t, 0, m3.applyCalled)
}
func TestSeqWithErrorInsideFinallyStage(t *testing.T) {
errorMut := &mutatorWithError{errorMsg: "error msg"}
m1 := &testMutator{}
m2 := &testMutator{}
m3 := &testMutator{}
seqMutator := Seq(m1, Defer(m2, errorMut), m3)
b := &Bundle{}
diags := Apply(context.Background(), b, seqMutator)
assert.Error(t, diags.Error())
assert.Equal(t, 1, m1.applyCalled)
assert.Equal(t, 1, m2.applyCalled)
assert.Equal(t, 1, errorMut.applyCalled)
assert.Equal(t, 0, m3.applyCalled)
}

View File

@@ -13,11 +13,10 @@ func TestApps(t *testing.T) {
 	b := load(t, "./apps")
 	assert.Equal(t, "apps", b.Config.Bundle.Name)
-	diags := bundle.Apply(context.Background(), b,
-		bundle.Seq(
+	diags := bundle.ApplySeq(context.Background(), b,
 		mutator.SetVariables(),
 		mutator.ResolveVariableReferences("variables"),
-	))
+	)
 	assert.Empty(t, diags)
 	app := b.Config.Resources.Apps["my_app"]
@@ -37,11 +36,10 @@ func TestAppsOverride(t *testing.T) {
 	b := loadTarget(t, "./apps", "development")
 	assert.Equal(t, "apps", b.Config.Bundle.Name)
-	diags := bundle.Apply(context.Background(), b,
-		bundle.Seq(
+	diags := bundle.ApplySeq(context.Background(), b,
 		mutator.SetVariables(),
 		mutator.ResolveVariableReferences("variables"),
-	))
+	)
 	assert.Empty(t, diags)
 	app := b.Config.Resources.Apps["my_app"]
 	assert.Equal(t, "my-app", app.Name)

View File

@@ -17,7 +17,7 @@ func TestIncludeInvalid(t *testing.T) {
 	ctx := context.Background()
 	b, err := bundle.Load(ctx, "./include_invalid")
 	require.NoError(t, err)
-	diags := bundle.Apply(ctx, b, phases.Load())
+	diags := phases.Load(ctx, b)
 	require.Error(t, diags.Error())
 	assert.ErrorContains(t, diags.Error(), "notexists.yml defined in 'include' section does not match any files")
 }

View File

@@ -20,7 +20,7 @@ func load(t *testing.T, path string) *bundle.Bundle {
 	ctx := context.Background()
 	b, err := bundle.Load(ctx, path)
 	require.NoError(t, err)
-	diags := bundle.Apply(ctx, b, phases.Load())
+	diags := phases.Load(ctx, b)
 	require.NoError(t, diags.Error())
 	return b
 }
@@ -38,8 +38,9 @@ func loadTargetWithDiags(path, env string) (*bundle.Bundle, diag.Diagnostics) {
 		return nil, diag.FromErr(err)
 	}
-	diags := bundle.Apply(ctx, b, bundle.Seq(
-		phases.LoadNamedTarget(env),
+	diags := phases.LoadNamedTarget(ctx, b, env)
+	diags = diags.Extend(bundle.ApplySeq(ctx, b,
 		mutator.RewriteSyncPaths(),
 		mutator.SyncDefaultPath(),
 		mutator.SyncInferRoot(),
@@ -69,10 +70,8 @@ func initializeTarget(t *testing.T, path, env string) (*bundle.Bundle, diag.Diag
 	configureMock(t, b)
 	ctx := dbr.MockRuntime(context.Background(), false)
-	diags := bundle.Apply(ctx, b, bundle.Seq(
-		mutator.SelectTarget(env),
-		phases.Initialize(),
-	))
+	diags := bundle.Apply(ctx, b, mutator.SelectTarget(env))
+	diags = diags.Extend(phases.Initialize(ctx, b))
 	return b, diags
 }

View File

@@ -18,7 +18,7 @@ func TestPythonWheelBuild(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel", "default")
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 	matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl")
@@ -34,7 +34,7 @@ func TestPythonWheelBuildAutoDetect(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_no_artifact", "default")
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 	matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl")
@@ -50,7 +50,7 @@ func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_notebook", "default")
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 	matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact_notebook/dist/my_test_code-*.whl")
@@ -66,7 +66,7 @@ func TestPythonWheelWithDBFSLib(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_dbfs_lib", "default")
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 	match := libraries.ExpandGlobReferences()
@@ -78,7 +78,7 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_no_setup", "default")
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 	mockFiler := mockfiler.NewMockFiler(t)
@@ -90,10 +90,10 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) {
 		filer.CreateParentDirectories,
 	).Return(nil)
-	diags = bundle.Apply(ctx, b, bundle.Seq(
+	diags = bundle.ApplySeq(ctx, b,
 		libraries.ExpandGlobReferences(),
 		libraries.UploadWithClient(mockFiler),
-	))
+	)
 	require.NoError(t, diags.Error())
 	require.Empty(t, diags)
 	require.Equal(t, "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[0].Libraries[0].Whl)
@@ -103,7 +103,7 @@ func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/environment_key", "default")
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 	matches, err := filepath.Glob("./python_wheel/environment_key/my_test_code/dist/my_test_code-*.whl")
@@ -119,7 +119,7 @@ func TestPythonWheelBuildMultiple(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_multiple", "default")
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 	matches, err := filepath.Glob("./python_wheel/python_wheel_multiple/my_test_code/dist/my_test_code*.whl")
@@ -135,7 +135,7 @@ func TestPythonWheelNoBuild(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_no_build", "default")
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 	match := libraries.ExpandGlobReferences()

View File

@@ -132,7 +132,7 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) {
 			require.NoError(t, err)
 			// The UniqueResourceKeys mutator is run as part of the Load phase.
-			diags := bundle.Apply(ctx, b, phases.Load())
+			diags := phases.Load(ctx, b)
 			assert.Equal(t, tc.diagnostics, diags)
 		})
 	}

View File

@@ -8,8 +8,8 @@ import (
 	"strings"
 	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/config/mutator"
 	"github.com/databricks/cli/bundle/libraries"
+	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 )
@@ -61,22 +61,30 @@ s = f.getvalue()
 dbutils.notebook.exit(s)
 `
+type transformWheelTask struct{}
+
+func (transformWheelTask) Name() string {
+	return "TransformWheelTask"
+}
+
 // This mutator takes the wheel task and transforms it into notebook
 // which installs uploaded wheels using %pip and then calling corresponding
 // entry point.
 func TransformWheelTask() bundle.Mutator {
-	return bundle.If(
-		func(_ context.Context, b *bundle.Bundle) (bool, error) {
-			res := b.Config.Experimental != nil && b.Config.Experimental.PythonWheelWrapper
-			return res, nil
-		},
-		NewTrampoline(
+	return transformWheelTask{}
+}
+
+func (transformWheelTask) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	isEnabled := b.Config.Experimental != nil && b.Config.Experimental.PythonWheelWrapper
+	if !isEnabled {
+		return nil
+	}
+	return bundle.Apply(ctx, b, NewTrampoline(
 		"python_wheel",
 		&pythonTrampoline{},
 		NOTEBOOK_TEMPLATE,
-		),
-		mutator.NoOp(),
-	)
+	))
 }
 type pythonTrampoline struct{}

View File

@@ -69,14 +69,19 @@ func newDeployCommand() *cobra.Command {
 			}
 		}
-		diags = diags.Extend(
-			bundle.Apply(ctx, b, bundle.Seq(
-				phases.Initialize(),
-				validate.FastValidate(),
-				phases.Build(),
-				phases.Deploy(outputHandler),
-			)),
-		)
+		diags = diags.Extend(phases.Initialize(ctx, b))
+		if !diags.HasError() {
+			diags = diags.Extend(bundle.Apply(ctx, b, validate.FastValidate()))
+		}
+		if !diags.HasError() {
+			diags = diags.Extend(phases.Build(ctx, b))
+		}
+		if !diags.HasError() {
+			diags = diags.Extend(phases.Deploy(ctx, b, outputHandler))
+		}
 	}
 	renderOpts := render.RenderOptions{RenderSummaryTable: false}

View File

@@ -53,15 +53,15 @@ func newBindCommand() *cobra.Command {
 			return nil
 		})
-		diags = bundle.Apply(ctx, b, bundle.Seq(
-			phases.Initialize(),
-			phases.Bind(&terraform.BindOptions{
+		diags = phases.Initialize(ctx, b)
+		if !diags.HasError() {
+			diags = diags.Extend(phases.Bind(ctx, b, &terraform.BindOptions{
 				AutoApprove:  autoApprove,
 				ResourceType: resource.TerraformResourceName(),
 				ResourceKey:  args[0],
 				ResourceId:   args[1],
-			}),
-		))
+			}))
+		}
 		if err := diags.Error(); err != nil {
 			return fmt.Errorf("failed to bind the resource, err: %w", err)
 		}

View File

@@ -38,10 +38,10 @@ func newUnbindCommand() *cobra.Command {
 			return nil
 		})
-		diags = bundle.Apply(cmd.Context(), b, bundle.Seq(
-			phases.Initialize(),
-			phases.Unbind(resource.TerraformResourceName(), args[0]),
-		))
+		diags = phases.Initialize(ctx, b)
+		if !diags.HasError() {
+			diags = diags.Extend(phases.Unbind(ctx, b, resource.TerraformResourceName(), args[0]))
+		}
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -61,20 +61,25 @@ func newDestroyCommand() *cobra.Command {
 			return errors.New("please specify --auto-approve since selected logging format is json")
 		}
-		diags = bundle.Apply(ctx, b, bundle.Seq(
-			phases.Initialize(),
-			// We need to resolve artifact variable (how we do it in build phase)
-			// because some of the to-be-destroyed resource might use this variable.
-			// Not resolving might lead to terraform "Reference to undeclared resource" error
-			mutator.ResolveVariableReferences(
-				"artifacts",
-			),
-			phases.Destroy(),
-		))
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}
-		return nil
+		diags = diags.Extend(
+			// We need to resolve artifact variable (how we do it in build phase)
+			// because some of the to-be-destroyed resource might use this variable.
+			// Not resolving might lead to terraform "Reference to undeclared resource" error
+			bundle.Apply(ctx, b, mutator.ResolveVariableReferences("artifacts")),
+		)
+		if err := diags.Error(); err != nil {
+			return err
+		}
+		diags = diags.Extend(phases.Destroy(ctx, b))
+		// QQQ we're not reporting warnings there. This would be addressed by switching to streaming warnings/errors instead of accumulating.
+		return diags.Error()
 	}
 	return cmd

View File

@@ -345,8 +345,12 @@ func (d *dashboard) initialize(b *bundle.Bundle) diag.Diagnostics {
 }
 func (d *dashboard) runForResource(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
-	diags := bundle.Apply(ctx, b, bundle.Seq(
-		phases.Initialize(),
+	diags := phases.Initialize(ctx, b)
+	if diags.HasError() {
+		return diags
+	}
+	diags = diags.Extend(bundle.ApplySeq(ctx, b,
 		terraform.Interpolate(),
 		terraform.Write(),
 		terraform.StatePull(),

View File

@@ -67,7 +67,7 @@ func newOpenCommand() *cobra.Command {
 			return diags.Error()
 		}
-		diags = bundle.Apply(ctx, b, phases.Initialize())
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}
@@ -86,20 +86,20 @@ func newOpenCommand() *cobra.Command {
 		noCache := errors.Is(stateFileErr, os.ErrNotExist) || errors.Is(configFileErr, os.ErrNotExist)
 		if forcePull || noCache {
-			diags = bundle.Apply(ctx, b, bundle.Seq(
+			diags = bundle.ApplySeq(ctx, b,
 				terraform.StatePull(),
 				terraform.Interpolate(),
 				terraform.Write(),
-			))
+			)
 			if err := diags.Error(); err != nil {
 				return err
 			}
 		}
-		diags = bundle.Apply(ctx, b, bundle.Seq(
+		diags = bundle.ApplySeq(ctx, b,
 			terraform.Load(),
 			mutator.InitializeURLs(),
-		))
+		)
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -111,7 +111,7 @@ task or a Python wheel task, the second example applies.
 			return diags.Error()
 		}
-		diags = bundle.Apply(ctx, b, phases.Initialize())
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}
@@ -121,12 +121,12 @@ task or a Python wheel task, the second example applies.
 			return err
 		}
-		diags = bundle.Apply(ctx, b, bundle.Seq(
+		diags = bundle.ApplySeq(ctx, b,
 			terraform.Interpolate(),
 			terraform.Write(),
 			terraform.StatePull(),
 			terraform.Load(terraform.ErrorOnEmptyState),
-		))
+		)
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -35,7 +35,7 @@ func newSummaryCommand() *cobra.Command {
 			return diags.Error()
 		}
-		diags = bundle.Apply(ctx, b, phases.Initialize())
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}
@@ -49,18 +49,20 @@ func newSummaryCommand() *cobra.Command {
 		noCache := errors.Is(stateFileErr, os.ErrNotExist) || errors.Is(configFileErr, os.ErrNotExist)
 		if forcePull || noCache {
-			diags = bundle.Apply(ctx, b, bundle.Seq(
+			diags = bundle.ApplySeq(ctx, b,
 				terraform.StatePull(),
 				terraform.Interpolate(),
 				terraform.Write(),
-			))
+			)
 			if err := diags.Error(); err != nil {
 				return err
 			}
 		}
-		diags = bundle.Apply(ctx, b,
-			bundle.Seq(terraform.Load(), mutator.InitializeURLs()))
+		diags = bundle.ApplySeq(ctx, b,
+			terraform.Load(),
+			mutator.InitializeURLs(),
+		)
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -71,7 +71,7 @@ func newSyncCommand() *cobra.Command {
 		}
 		// Run initialize phase to make sure paths are set.
-		diags = bundle.Apply(ctx, b, phases.Initialize())
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -46,7 +46,7 @@ func newValidateCommand() *cobra.Command {
 		}
 		if !diags.HasError() {
-			diags = diags.Extend(bundle.Apply(ctx, b, phases.Initialize()))
+			diags = diags.Extend(phases.Initialize(ctx, b))
 		}
 		if !diags.HasError() {

View File

@@ -74,16 +74,15 @@ func configureProfile(cmd *cobra.Command, b *bundle.Bundle) diag.Diagnostics {
 // configureBundle loads the bundle configuration and configures flag values, if any.
 func configureBundle(cmd *cobra.Command, b *bundle.Bundle) (*bundle.Bundle, diag.Diagnostics) {
-	var m bundle.Mutator
-	if target := getTarget(cmd); target == "" {
-		m = phases.LoadDefaultTarget()
-	} else {
-		m = phases.LoadNamedTarget(target)
-	}
 	// Load bundle and select target.
 	ctx := cmd.Context()
-	diags := bundle.Apply(ctx, b, m)
+	var diags diag.Diagnostics
+	if target := getTarget(cmd); target == "" {
+		diags = phases.LoadDefaultTarget(ctx, b)
+	} else {
+		diags = phases.LoadNamedTarget(ctx, b, target)
+	}
 	if diags.HasError() {
 		return b, diags
 	}
@@ -159,7 +158,7 @@ func targetCompletion(cmd *cobra.Command, args []string, toComplete string) ([]s
 	}
 	// Load bundle but don't select a target (we're completing those).
-	diags := bundle.Apply(ctx, b, phases.Load())
+	diags := phases.Load(ctx, b)
 	if err := diags.Error(); err != nil {
 		cobra.CompErrorln(err.Error())
 		return nil, cobra.ShellCompDirectiveError

View File

@@ -80,7 +80,7 @@ func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) {
 		},
 	}
-	diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload()))
+	diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload())
 	require.NoError(t, diags.Error())
 	// The remote path attribute on the artifact file should have been set.
@@ -144,7 +144,7 @@ func TestUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) {
 		},
 	}
-	diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload()))
+	diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload())
 	require.NoError(t, diags.Error())
 	// The remote path attribute on the artifact file should have been set.
@@ -213,7 +213,7 @@ func TestUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
 		},
 	}
-	diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload()))
+	diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload())
 	require.NoError(t, diags.Error())
 	// The remote path attribute on the artifact file should have been set.

View File

@@ -78,7 +78,7 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri
 	b, err := bundle.Load(ctx, filepath.Join(tempDir, "my_project"))
 	require.NoError(t, err)
-	diags := bundle.Apply(ctx, b, phases.LoadNamedTarget(target))
+	diags := phases.LoadNamedTarget(ctx, b, target)
 	require.NoError(t, diags.Error())
 	// Apply initialize / validation mutators
@@ -93,14 +93,12 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri
 	b.Tagging = tags.ForCloud(w.Config)
 	b.WorkspaceClient()
-	diags = bundle.Apply(ctx, b, bundle.Seq(
-		phases.Initialize(),
-	))
+	diags = phases.Initialize(ctx, b)
 	require.NoError(t, diags.Error())
 	// Apply build mutator
 	if build {
-		diags = bundle.Apply(ctx, b, phases.Build())
+		diags = phases.Build(ctx, b)
 		require.NoError(t, diags.Error())
 	}
 }