Annotate DLT pipelines when deployed using DABs (#1410)

## Changes
This PR annotates pipelines deployed using DABs with `deployment.kind` set to
"BUNDLE", mirroring the existing annotation for jobs
(see the analogous PR for jobs: https://github.com/databricks/cli/pull/880).
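
For reference, the pipeline annotation has the same shape as the job one. Below is a minimal, self-contained sketch using the SDK types this PR touches; the state path is a made-up placeholder and the snippet only illustrates the values DABs fills in:

```go
package main

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
)

func main() {
	// Placeholder for the bundle's workspace state path.
	statePath := "/Users/someone@example.com/.bundle/my-bundle/default/state"

	// Existing annotation for jobs (added in #880).
	jobDeployment := &jobs.JobDeployment{
		Kind:             jobs.JobDeploymentKindBundle,
		MetadataFilePath: statePath + "/metadata.json",
	}

	// New annotation for pipelines (added in this PR), mirroring the job shape.
	pipelineDeployment := &pipelines.PipelineDeployment{
		Kind:             pipelines.DeploymentKindBundle,
		MetadataFilePath: statePath + "/metadata.json",
	}

	fmt.Println(jobDeployment.Kind, pipelineDeployment.Kind) // BUNDLE BUNDLE
}
```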

The breakglass UI is not yet available for pipelines, so for now this
annotation will only be used for revenue attribution.

Note: The API field has been deployed in all regions including GovCloud.

## Tests
Unit tests and manual verification.

Manually verified that the `kind` and `metadata_file_path` fields are set by
DABs and are returned by a GET call for a pipeline deployed using a DAB.
Example:
```
    "deployment": {
      "kind":"BUNDLE",
      "metadata_file_path":"/Users/shreyas.goenka@databricks.com/.bundle/bundle-playground/default/state/metadata.json"
    },
```
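
For completeness, here is a rough sketch of how that manual check could be scripted with the Databricks Go SDK; the pipeline ID is a placeholder, and the exact response field names are assumptions based on the SDK types used in this PR:

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
)

func main() {
	ctx := context.Background()
	w := databricks.Must(databricks.NewWorkspaceClient())

	// Fetch a pipeline that was deployed through a DAB (placeholder ID).
	p, err := w.Pipelines.Get(ctx, pipelines.GetPipelineRequest{
		PipelineId: "<pipeline-id>",
	})
	if err != nil {
		panic(err)
	}

	// Expect kind=BUNDLE and a metadata_file_path under the bundle state path.
	fmt.Println(p.Spec.Deployment.Kind, p.Spec.Deployment.MetadataFilePath)
}
```
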
Commit: 507053ee50 (parent: 153141d3ea)
Author: shreyas-goenka, 2024-05-01 14:07:03 +05:30 (committed by GitHub)
5 changed files with 115 additions and 4 deletions

View File

@@ -2,7 +2,6 @@ package metadata
import (
	"context"
-	"path"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
@@ -27,7 +26,7 @@ func (m *annotateJobs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
		job.JobSettings.Deployment = &jobs.JobDeployment{
			Kind:             jobs.JobDeploymentKindBundle,
-			MetadataFilePath: path.Join(b.Config.Workspace.StatePath, MetadataFileName),
+			MetadataFilePath: metadataFilePath(b),
		}
		job.JobSettings.EditMode = jobs.JobEditModeUiLocked
		job.JobSettings.Format = jobs.FormatMultiTask

View File

@@ -0,0 +1,34 @@
package metadata

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
)

type annotatePipelines struct{}

func AnnotatePipelines() bundle.Mutator {
	return &annotatePipelines{}
}

func (m *annotatePipelines) Name() string {
	return "metadata.AnnotatePipelines"
}

func (m *annotatePipelines) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
	for _, pipeline := range b.Config.Resources.Pipelines {
		if pipeline.PipelineSpec == nil {
			continue
		}

		pipeline.PipelineSpec.Deployment = &pipelines.PipelineDeployment{
			Kind:             pipelines.DeploymentKindBundle,
			MetadataFilePath: metadataFilePath(b),
		}
	}

	return nil
}

View File

@@ -0,0 +1,72 @@
package metadata

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestAnnotatePipelinesMutator(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Workspace: config.Workspace{
				StatePath: "/a/b/c",
			},
			Resources: config.Resources{
				Pipelines: map[string]*resources.Pipeline{
					"my-pipeline-1": {
						PipelineSpec: &pipelines.PipelineSpec{
							Name: "My Pipeline One",
						},
					},
					"my-pipeline-2": {
						PipelineSpec: &pipelines.PipelineSpec{
							Name: "My Pipeline Two",
						},
					},
				},
			},
		},
	}

	diags := bundle.Apply(context.Background(), b, AnnotatePipelines())
	require.NoError(t, diags.Error())

	assert.Equal(t,
		&pipelines.PipelineDeployment{
			Kind:             pipelines.DeploymentKindBundle,
			MetadataFilePath: "/a/b/c/metadata.json",
		},
		b.Config.Resources.Pipelines["my-pipeline-1"].PipelineSpec.Deployment)

	assert.Equal(t,
		&pipelines.PipelineDeployment{
			Kind:             pipelines.DeploymentKindBundle,
			MetadataFilePath: "/a/b/c/metadata.json",
		},
		b.Config.Resources.Pipelines["my-pipeline-2"].PipelineSpec.Deployment)
}

func TestAnnotatePipelinesMutatorPipelineWithoutASpec(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Workspace: config.Workspace{
				StatePath: "/a/b/c",
			},
			Resources: config.Resources{
				Pipelines: map[string]*resources.Pipeline{
					"my-pipeline-1": {},
				},
			},
		},
	}

	diags := bundle.Apply(context.Background(), b, AnnotatePipelines())
	require.NoError(t, diags.Error())
}

View File

@@ -4,13 +4,18 @@ import (
	"bytes"
	"context"
	"encoding/json"
+	"path"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/filer"
)

-const MetadataFileName = "metadata.json"
+const metadataFileName = "metadata.json"
+
+func metadataFilePath(b *bundle.Bundle) string {
+	return path.Join(b.Config.Workspace.StatePath, metadataFileName)
+}

type upload struct{}
@@ -33,5 +38,5 @@ func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
		return diag.FromErr(err)
	}

-	return diag.FromErr(f.Write(ctx, MetadataFileName, bytes.NewReader(metadata), filer.CreateParentDirectories, filer.OverwriteIfExists))
+	return diag.FromErr(f.Write(ctx, metadataFileName, bytes.NewReader(metadata), filer.CreateParentDirectories, filer.OverwriteIfExists))
}

View File

@@ -45,6 +45,7 @@ func Initialize() bundle.Mutator {
			permissions.ApplyBundlePermissions(),
			permissions.FilterCurrentUser(),
			metadata.AnnotateJobs(),
+			metadata.AnnotatePipelines(),
			terraform.Initialize(),
			scripts.Execute(config.ScriptPostInit),
		},