From 6f9817e194092e47c7e5b41da4df62f1ea7caeeb Mon Sep 17 00:00:00 2001
From: Shreyas Goenka
Date: Tue, 10 Sep 2024 12:52:31 +0200
Subject: [PATCH] Add prompt and CRUD test for volumes

---
 .../deploy/terraform/tfdyn/convert_volume.go  |  2 +-
 bundle/phases/deploy.go                       | 46 ++++++++--------
 bundle/phases/deploy_test.go                  |  2 +-
 .../uc_volume/databricks_template_schema.json |  8 +++
 .../uc_volume/template/databricks.yml.tmpl    | 26 +++++++++
 .../bundle/bundles/uc_volume/template/nb.sql  |  2 +
 internal/bundle/deploy_test.go                | 53 +++++++++++++++++++
 7 files changed, 112 insertions(+), 27 deletions(-)
 create mode 100644 internal/bundle/bundles/uc_volume/databricks_template_schema.json
 create mode 100644 internal/bundle/bundles/uc_volume/template/databricks.yml.tmpl
 create mode 100644 internal/bundle/bundles/uc_volume/template/nb.sql

diff --git a/bundle/deploy/terraform/tfdyn/convert_volume.go b/bundle/deploy/terraform/tfdyn/convert_volume.go
index dd06cf182..281cd4326 100644
--- a/bundle/deploy/terraform/tfdyn/convert_volume.go
+++ b/bundle/deploy/terraform/tfdyn/convert_volume.go
@@ -33,7 +33,7 @@ func (volumeConverter) Convert(ctx context.Context, key string, vin dyn.Value, o
 	}
 
 	// Add the converted resource to the output.
-	out.Schema[key] = vout.AsAny()
+	out.Volume[key] = vout.AsAny()
 
 	// Configure grants for this resource.
 	if grants := convertGrantsResource(ctx, vin); grants != nil {
diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go
index 49544227e..d71c61912 100644
--- a/bundle/phases/deploy.go
+++ b/bundle/phases/deploy.go
@@ -22,10 +22,12 @@ import (
 	tfjson "github.com/hashicorp/terraform-json"
 )
 
-func parseTerraformActions(changes []*tfjson.ResourceChange, toInclude func(typ string, actions tfjson.Actions) bool) []terraformlib.Action {
+func filterDeleteOrRecreateActions(changes []*tfjson.ResourceChange, resourceType string) []terraformlib.Action {
 	res := make([]terraformlib.Action, 0)
 	for _, rc := range changes {
-		if !toInclude(rc.Type, rc.Change.Actions) {
+		// TODO: Add end-to-end integration tests for the interactive prompt UX.
+		// That would be a good PR in which to introduce the first one and make changes more confidently.
+		if rc.Type != resourceType {
 			continue
 		}
 
@@ -36,7 +38,7 @@ func parseTerraformActions(changes []*tfjson.ResourceChange, toInclude func(typ
 		case rc.Change.Actions.Replace():
 			actionType = terraformlib.ActionTypeRecreate
 		default:
-			// No use case for other action types yet.
+			// Filter out other action types.
 			continue
 		}
 
@@ -62,30 +64,12 @@ func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) {
 		return false, err
 	}
 
-	schemaActions := parseTerraformActions(plan.ResourceChanges, func(typ string, actions tfjson.Actions) bool {
-		// Filter in only UC schema resources.
-		if typ != "databricks_schema" {
-			return false
-		}
-
-		// We only display prompts for destructive actions like deleting or
-		// recreating a schema.
-		return actions.Delete() || actions.Replace()
-	})
-
-	dltActions := parseTerraformActions(plan.ResourceChanges, func(typ string, actions tfjson.Actions) bool {
-		// Filter in only DLT pipeline resources.
-		if typ != "databricks_pipeline" {
-			return false
-		}
-
-		// Recreating DLT pipeline leads to metadata loss and for a transient period
-		// the underling tables will be unavailable.
-		return actions.Replace() || actions.Delete()
-	})
+	schemaActions := filterDeleteOrRecreateActions(plan.ResourceChanges, "databricks_schema")
+	dltActions := filterDeleteOrRecreateActions(plan.ResourceChanges, "databricks_pipeline")
+	volumeActions := filterDeleteOrRecreateActions(plan.ResourceChanges, "databricks_volume")
 
 	// We don't need to display any prompts in this case.
-	if len(dltActions) == 0 && len(schemaActions) == 0 {
+	if len(schemaActions) == 0 && len(dltActions) == 0 && len(volumeActions) == 0 {
 		return true, nil
 	}
 
@@ -110,6 +94,18 @@ properties such as the 'catalog' or 'storage' are changed:`
 		}
 	}
 
+	// One or more volumes are being recreated.
+	if len(volumeActions) != 0 {
+		msg := `
+This action will result in the deletion or recreation of the following volumes. For managed volumes,
+this typically results in a deletion of the upstream data in the cloud tenant in ~30 days. For external
+volumes, the upstream data in the cloud tenant is not affected:`
+		cmdio.LogString(ctx, msg)
+		for _, action := range volumeActions {
+			cmdio.Log(ctx, action)
+		}
+	}
+
 	if b.AutoApprove {
 		return true, nil
 	}
diff --git a/bundle/phases/deploy_test.go b/bundle/phases/deploy_test.go
index e00370b38..f239302d4 100644
--- a/bundle/phases/deploy_test.go
+++ b/bundle/phases/deploy_test.go
@@ -40,7 +40,7 @@ func TestParseTerraformActions(t *testing.T) {
 		},
 	}
 
-	res := parseTerraformActions(changes, func(typ string, actions tfjson.Actions) bool {
+	res := filterDeleteOrRecreateActions(changes, func(typ string, actions tfjson.Actions) bool {
 		if typ != "databricks_pipeline" {
 			return false
 		}
diff --git a/internal/bundle/bundles/uc_volume/databricks_template_schema.json b/internal/bundle/bundles/uc_volume/databricks_template_schema.json
new file mode 100644
index 000000000..762f4470c
--- /dev/null
+++ b/internal/bundle/bundles/uc_volume/databricks_template_schema.json
@@ -0,0 +1,8 @@
+{
+  "properties": {
+    "unique_id": {
+      "type": "string",
+      "description": "Unique ID for the schema names"
+    }
+  }
+}
diff --git a/internal/bundle/bundles/uc_volume/template/databricks.yml.tmpl b/internal/bundle/bundles/uc_volume/template/databricks.yml.tmpl
new file mode 100644
index 000000000..9d377f8e7
--- /dev/null
+++ b/internal/bundle/bundles/uc_volume/template/databricks.yml.tmpl
@@ -0,0 +1,26 @@
+bundle:
+  name: test-uc-volumes-{{.unique_id}}
+
+variables:
+  schema_name:
+    default: ${resources.schemas.schema1.name}
+
+resources:
+  schemas:
+    schema1:
+      name: schema1-{{.unique_id}}
+      catalog_name: main
+      comment: This schema was created from DABs
+
+    schema2:
+      name: schema2-{{.unique_id}}
+      catalog_name: main
+      comment: This schema was created from DABs
+
+  volumes:
+    foo:
+      catalog_name: main
+      name: my_volume
+      schema_name: ${var.schema_name}
+      volume_type: MANAGED
+      comment: This volume was created from DABs.
diff --git a/internal/bundle/bundles/uc_volume/template/nb.sql b/internal/bundle/bundles/uc_volume/template/nb.sql
new file mode 100644
index 000000000..199ff5078
--- /dev/null
+++ b/internal/bundle/bundles/uc_volume/template/nb.sql
@@ -0,0 +1,2 @@
+-- Databricks notebook source
+select 1
diff --git a/internal/bundle/deploy_test.go b/internal/bundle/deploy_test.go
index 736c880db..225458e19 100644
--- a/internal/bundle/deploy_test.go
+++ b/internal/bundle/deploy_test.go
@@ -243,3 +243,56 @@ func TestAccDeployBasicBundleLogs(t *testing.T) {
 	}, "\n"), stderr)
 	assert.Equal(t, "", stdout)
 }
+
+func TestAccDeployUcVolume(t *testing.T) {
+	ctx, wt := acc.UcWorkspaceTest(t)
+	w := wt.W
+
+	uniqueId := uuid.New().String()
+	bundleRoot, err := initTestTemplate(t, ctx, "uc_volume", map[string]any{
+		"unique_id": uniqueId,
+	})
+	require.NoError(t, err)
+
+	err = deployBundle(t, ctx, bundleRoot)
+	require.NoError(t, err)
+
+	t.Cleanup(func() {
+		destroyBundle(t, ctx, bundleRoot)
+	})
+
+	// Assert the volume is created successfully
+	catalogName := "main"
+	schemaName := "schema1-" + uniqueId
+	volumeName := "my_volume"
+	volume, err := w.Volumes.ReadByName(ctx, fmt.Sprintf("%s.%s.%s", catalogName, schemaName, volumeName))
+	require.NoError(t, err)
+	require.Equal(t, volumeName, volume.Name)
+	require.Equal(t, catalogName, volume.CatalogName)
+	require.Equal(t, schemaName, volume.SchemaName)
+
+	// Recreation of the volume without --auto-approve should fail since prompting is not possible
+	t.Setenv("TERM", "dumb")
+	t.Setenv("BUNDLE_ROOT", bundleRoot)
+	stdout, stderr, err := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}").Run()
+	assert.Error(t, err)
+	assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following volumes. For managed volumes,
+this typically results in a deletion of the upstream data in the cloud tenant in ~30 days. For external
+volumes, the upstream data in the cloud tenant is not affected:
+ recreate volume foo`)
+	assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
+
+	// Recreation of the volume with --auto-approve should succeed since prompts are skipped
+	t.Setenv("TERM", "dumb")
+	t.Setenv("BUNDLE_ROOT", bundleRoot)
+	_, _, err = internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}", "--auto-approve").Run()
+	assert.NoError(t, err)
+
+	// Assert the volume is updated successfully
+	schemaName = "schema2-" + uniqueId
+	volume, err = w.Volumes.ReadByName(ctx, fmt.Sprintf("%s.%s.%s", catalogName, schemaName, volumeName))
+	require.NoError(t, err)
+	require.Equal(t, volumeName, volume.Name)
+	require.Equal(t, catalogName, volume.CatalogName)
+	require.Equal(t, schemaName, volume.SchemaName)
+}
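
Note: the renamed filterDeleteOrRecreateActions helper is only exercised indirectly by the integration
test above. A minimal unit test for the new databricks_volume filtering path could look roughly like the
sketch below; it is not part of this patch, the test name and test data are illustrative only, and it
assumes the tfjson and testify/assert imports already present in bundle/phases/deploy_test.go.

func TestFilterDeleteOrRecreateActionsForVolumes(t *testing.T) {
	changes := []*tfjson.ResourceChange{
		{
			Type: "databricks_volume",
			Name: "recreated_volume",
			Change: &tfjson.Change{
				// A delete+create pair is reported as a replace by terraform-json.
				Actions: tfjson.Actions{tfjson.ActionDelete, tfjson.ActionCreate},
			},
		},
		{
			Type: "databricks_volume",
			Name: "updated_in_place",
			Change: &tfjson.Change{
				Actions: tfjson.Actions{tfjson.ActionUpdate},
			},
		},
		{
			Type: "databricks_schema",
			Name: "other_resource_type",
			Change: &tfjson.Change{
				Actions: tfjson.Actions{tfjson.ActionDelete},
			},
		},
	}

	// Only the destructive change to a databricks_volume resource should be returned;
	// in-place updates and other resource types are filtered out.
	res := filterDeleteOrRecreateActions(changes, "databricks_volume")
	assert.Len(t, res, 1)
}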