diff --git a/bundle/apps/interpolate_variables.go b/bundle/apps/interpolate_variables.go
new file mode 100644
index 000000000..206609a12
--- /dev/null
+++ b/bundle/apps/interpolate_variables.go
@@ -0,0 +1,69 @@
+package apps
+
+import (
+	"context"
+
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/dyn"
+	"github.com/databricks/cli/libs/dyn/dynvar"
+)
+
+type interpolateVariables struct{}
+
+func (i *interpolateVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	pattern := dyn.NewPattern(
+		dyn.Key("resources"),
+		dyn.Key("apps"),
+		dyn.AnyKey(),
+		dyn.Key("config"),
+	)
+
+	err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
+		return dyn.MapByPattern(root, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+			return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) {
+				switch path[0] {
+				case dyn.Key("databricks_pipeline"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("pipelines")).Append(path[1:]...)
+				case dyn.Key("databricks_job"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs")).Append(path[1:]...)
+				case dyn.Key("databricks_mlflow_model"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("models")).Append(path[1:]...)
+				case dyn.Key("databricks_mlflow_experiment"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("experiments")).Append(path[1:]...)
+				case dyn.Key("databricks_model_serving"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("model_serving_endpoints")).Append(path[1:]...)
+				case dyn.Key("databricks_registered_model"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("registered_models")).Append(path[1:]...)
+				case dyn.Key("databricks_quality_monitor"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("quality_monitors")).Append(path[1:]...)
+				case dyn.Key("databricks_schema"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("schemas")).Append(path[1:]...)
+				case dyn.Key("databricks_volume"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("volumes")).Append(path[1:]...)
+				case dyn.Key("databricks_cluster"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("clusters")).Append(path[1:]...)
+				case dyn.Key("databricks_dashboard"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("dashboards")).Append(path[1:]...)
+				case dyn.Key("databricks_app"):
+					path = dyn.NewPath(dyn.Key("resources"), dyn.Key("apps")).Append(path[1:]...)
+				default:
+					// Trigger "key not found" for unknown resource types.
+					return dyn.GetByPath(root, path)
+				}
+
+				return dyn.GetByPath(root, path)
+			})
+		})
+	})
+
+	return diag.FromErr(err)
+}
+
+func (i *interpolateVariables) Name() string {
+	return "apps.InterpolateVariables"
+}
+
+func InterpolateVariables() bundle.Mutator {
+	return &interpolateVariables{}
+}
diff --git a/bundle/apps/interpolate_variables_test.go b/bundle/apps/interpolate_variables_test.go
new file mode 100644
index 000000000..4fec8c6da
--- /dev/null
+++ b/bundle/apps/interpolate_variables_test.go
@@ -0,0 +1,49 @@
+package apps
+
+import (
+	"context"
+	"testing"
+
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/bundle/config"
+	"github.com/databricks/cli/bundle/config/resources"
+	"github.com/databricks/databricks-sdk-go/service/apps"
+	"github.com/stretchr/testify/require"
+)
+
+func TestAppInterpolateVariables(t *testing.T) {
+	b := &bundle.Bundle{
+		Config: config.Root{
+			Resources: config.Resources{
+				Apps: map[string]*resources.App{
+					"my_app_1": {
+						App: &apps.App{
+							Name: "my_app_1",
+						},
+						Config: map[string]any{
+							"command": []string{"echo", "hello"},
+							"env": []map[string]string{
+								{"name": "JOB_ID", "value": "${resources.jobs.my_job.id}"},
+							},
+						},
+					},
+					"my_app_2": {
+						App: &apps.App{
+							Name: "my_app_2",
+						},
+					},
+				},
+				Jobs: map[string]*resources.Job{
+					"my_job": {
+						ID: "123",
+					},
+				},
+			},
+		},
+	}
+
+	diags := bundle.Apply(context.Background(), b, InterpolateVariables())
+	require.Empty(t, diags)
+	require.Equal(t, []any([]any{map[string]any{"name": "JOB_ID", "value": "123"}}), b.Config.Resources.Apps["my_app_1"].Config["env"])
+	require.Nil(t, b.Config.Resources.Apps["my_app_2"].Config)
+}
diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go
index 2dc9623bd..17c09bcff 100644
--- a/bundle/phases/deploy.go
+++ b/bundle/phases/deploy.go
@@ -5,6 +5,7 @@ import (
 	"fmt"
 
 	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/bundle/apps"
 	"github.com/databricks/cli/bundle/artifacts"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/mutator"
@@ -135,6 +136,8 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
 		bundle.Seq(
 			terraform.StatePush(),
 			terraform.Load(),
+			apps.InterpolateVariables(),
+			apps.UploadConfig(),
 			metadata.Compute(),
 			metadata.Upload(),
 			bundle.LogString("Deployment complete!"),
diff --git a/internal/bundle/apps_test.go b/internal/bundle/apps_test.go
index b25b3916e..89acc5cdc 100644
--- a/internal/bundle/apps_test.go
+++ b/internal/bundle/apps_test.go
@@ -2,6 +2,7 @@ package bundle
 
 import (
 	"fmt"
+	"io"
 	"testing"
 
 	"github.com/databricks/cli/internal"
@@ -48,6 +49,30 @@ func TestAccDeployBundleWithApp(t *testing.T) {
 	require.NoError(t, err)
 	require.NotNil(t, app)
 
+	// Check app config
+	currentUser, err := wt.W.CurrentUser.Me(ctx)
+	require.NoError(t, err)
+
+	pathToAppYml := fmt.Sprintf("/Workspace/Users/%s/.bundle/%s/files/app/app.yml", currentUser.UserName, uniqueId)
+	reader, err := wt.W.Workspace.Download(ctx, pathToAppYml)
+	require.NoError(t, err)
+
+	data, err := io.ReadAll(reader)
+	require.NoError(t, err)
+
+	job, err := wt.W.Jobs.GetBySettingsName(ctx, fmt.Sprintf("test-job-with-cluster-%s", uniqueId))
+	require.NoError(t, err)
+
+	content := string(data)
+	require.Contains(t, content, fmt.Sprintf(`command:
+  - flask
+  - --app
+  - app
+  - run
+env:
+  - name: JOB_ID
+    value: "%d"`, job.JobId))
+
 	// Try to run the app
 	_, out, err := runResourceWithStderr(t, ctx, root, "test_app")
 	require.NoError(t, err)
diff --git a/internal/bundle/bundles/apps/template/databricks.yml.tmpl b/internal/bundle/bundles/apps/template/databricks.yml.tmpl
index 9ab21bf6f..4d862a06f 100644
--- a/internal/bundle/bundles/apps/template/databricks.yml.tmpl
+++ b/internal/bundle/bundles/apps/template/databricks.yml.tmpl
@@ -18,7 +18,7 @@ resources:
           - run
         env:
           - name: JOB_ID
-            valueFrom: "app-job"
+            value: ${resources.jobs.foo.id}
 
       resources:
         - name: "app-job"