Add DABs support for AI/BI dashboards

Pieter Noordhuis 2024-09-03 10:35:41 +02:00
parent 0f4891f0fe
commit 3461c66dc9
10 changed files with 341 additions and 0 deletions

View File

@@ -162,6 +162,20 @@ func (t *translateContext) translateNoOp(literal, localFullPath, localRelPath, r
return localRelPath, nil
}
func (t *translateContext) retainLocalAbsoluteFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
info, err := t.b.SyncRoot.Stat(localRelPath)
if errors.Is(err, fs.ErrNotExist) {
return "", fmt.Errorf("file %s not found", literal)
}
if err != nil {
return "", fmt.Errorf("unable to determine if %s is a file: %w", localFullPath, err)
}
if info.IsDir() {
return "", fmt.Errorf("expected %s to be a file but found a directory", literal)
}
return localFullPath, nil
}
func (t *translateContext) translateNoOpWithPrefix(literal, localFullPath, localRelPath, remotePath string) (string, error) {
if !strings.HasPrefix(localRelPath, ".") {
localRelPath = "." + string(filepath.Separator) + localRelPath
@@ -215,6 +229,7 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
t.applyJobTranslations,
t.applyPipelineTranslations,
t.applyArtifactTranslations,
t.applyDashboardTranslations,
} {
v, err = fn(v)
if err != nil {
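The new retainLocalAbsoluteFilePath helper validates that the referenced file exists and is not a directory, but keeps the local absolute path instead of rewriting it to a workspace path; the Terraform converter further down then inlines the file contents through a ${file(...)} interpolation. Below is a minimal standalone sketch of the same validation, using os.Stat against the regular filesystem rather than the bundle's SyncRoot; the function name is illustrative and not part of this commit.

package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
)

// retainIfRegularFile mirrors retainLocalAbsoluteFilePath: the path is returned
// unchanged, but only if it exists and points at a regular file.
func retainIfRegularFile(literal, localFullPath string) (string, error) {
	info, err := os.Stat(localFullPath)
	if errors.Is(err, fs.ErrNotExist) {
		return "", fmt.Errorf("file %s not found", literal)
	}
	if err != nil {
		return "", fmt.Errorf("unable to determine if %s is a file: %w", localFullPath, err)
	}
	if info.IsDir() {
		return "", fmt.Errorf("expected %s to be a file but found a directory", literal)
	}
	return localFullPath, nil
}

func main() {
	path, err := retainIfRegularFile("./dashboard.lvdash.json", "/tmp/dashboard.lvdash.json")
	fmt.Println(path, err)
}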

View File

@@ -0,0 +1,50 @@
package mutator
import (
"fmt"
"github.com/databricks/cli/libs/dyn"
)
type dashboardRewritePattern struct {
pattern dyn.Pattern
fn rewriteFunc
}
func (t *translateContext) dashboardRewritePatterns() []dashboardRewritePattern {
// Base pattern to match all dashboards.
base := dyn.NewPattern(
dyn.Key("resources"),
dyn.Key("dashboards"),
dyn.AnyKey(),
)
// Compile list of configuration paths to rewrite.
return []dashboardRewritePattern{
{
base.Append(dyn.Key("definition_path")),
t.retainLocalAbsoluteFilePath,
},
}
}
func (t *translateContext) applyDashboardTranslations(v dyn.Value) (dyn.Value, error) {
var err error
for _, rewritePattern := range t.dashboardRewritePatterns() {
v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
key := p[2].Key() // p[2] is the dashboard name under resources.dashboards
dir, err := v.Location().Directory()
if err != nil {
return dyn.InvalidValue, fmt.Errorf("unable to determine directory for dashboard %s: %w", key, err)
}
return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, "")
})
if err != nil {
return dyn.InvalidValue, err
}
}
return v, nil
}

View File

@@ -19,6 +19,7 @@ type Resources struct {
RegisteredModels map[string]*resources.RegisteredModel `json:"registered_models,omitempty"`
QualityMonitors map[string]*resources.QualityMonitor `json:"quality_monitors,omitempty"`
Schemas map[string]*resources.Schema `json:"schemas,omitempty"`
Dashboards map[string]*resources.Dashboard `json:"dashboards,omitempty"`
}
type ConfigResource interface {

View File

@@ -0,0 +1,59 @@
package resources
import (
"context"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
"github.com/databricks/databricks-sdk-go/service/dashboards"
)
type Dashboard struct {
ID string `json:"id,omitempty" bundle:"readonly"`
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
// ===========================
// === BEGIN OF API FIELDS ===
// ===========================
// DisplayName is the name of the dashboard (both as title and as basename in the workspace).
DisplayName string `json:"display_name,omitempty"`
// ParentPath is the path to the parent directory of the dashboard.
ParentPath string `json:"parent_path,omitempty"`
// WarehouseID is the ID of the warehouse to use for the dashboard.
WarehouseID string `json:"warehouse_id,omitempty"`
// ===========================
// ==== END OF API FIELDS ====
// ===========================
// DefinitionPath points to the local `.lvdash.json` file containing the dashboard definition.
DefinitionPath string `json:"definition_path,omitempty"`
}
func (s *Dashboard) UnmarshalJSON(b []byte) error {
return marshal.Unmarshal(b, s)
}
func (s Dashboard) MarshalJSON() ([]byte, error) {
return marshal.Marshal(s)
}
func (_ *Dashboard) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) {
_, err := w.Lakeview.Get(ctx, dashboards.GetDashboardRequest{
DashboardId: id,
})
if err != nil {
log.Debugf(ctx, "Dashboard %s does not exist", id)
return false, err
}
return true, nil
}
func (_ *Dashboard) TerraformResourceName() string {
return "databricks_dashboard"
}
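The json tags on the API fields above double as the keys a bundle author writes under resources.dashboards.<name> in the bundle configuration (display_name, parent_path, warehouse_id, definition_path). As a quick, illustrative sketch (not part of this commit), marshaling the struct shows that wire shape; the values are made up.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/databricks/cli/bundle/config/resources"
)

func main() {
	d := resources.Dashboard{
		DisplayName:    "NYC Taxi Trip Analysis",
		WarehouseID:    "abcdef1234567890",
		DefinitionPath: "./dashboards/nyc_taxi.lvdash.json",
	}

	buf, err := json.MarshalIndent(d, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(buf))
	// Expected output (keys follow the json tags; empty fields are omitted):
	// {
	//   "display_name": "NYC Taxi Trip Analysis",
	//   "warehouse_id": "abcdef1234567890",
	//   "definition_path": "./dashboards/nyc_taxi.lvdash.json"
	// }
}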

View File

@@ -394,6 +394,16 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
}
cur.ID = instance.Attributes.ID
config.Resources.Schemas[resource.Name] = cur
case "databricks_dashboard":
if config.Resources.Dashboards == nil {
config.Resources.Dashboards = make(map[string]*resources.Dashboard)
}
cur := config.Resources.Dashboards[resource.Name]
if cur == nil {
cur = &resources.Dashboard{ModifiedStatus: resources.ModifiedStatusDeleted}
}
cur.ID = instance.Attributes.ID
config.Resources.Dashboards[resource.Name] = cur
case "databricks_permissions": case "databricks_permissions":
case "databricks_grants": case "databricks_grants":
// Ignore; no need to pull these back into the configuration. // Ignore; no need to pull these back into the configuration.

View File

@@ -0,0 +1,55 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
)
func convertDashboardResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
var err error
// Normalize the output value to the target schema.
vout, diags := convert.Normalize(schema.ResourceDashboard{}, vin)
for _, diag := range diags {
log.Debugf(ctx, "dashboard normalization diagnostic: %s", diag.Summary)
}
// Include "serialized_dashboard" field if "definition_path" is set.
if path, ok := vin.Get("definition_path").AsString(); ok {
vout, err = dyn.Set(vout, "serialized_dashboard", dyn.V(fmt.Sprintf("${file(\"%s\")}", path)))
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to set serialized_dashboard: %w", err)
}
}
return vout, nil
}
type DashboardConverter struct{}
func (DashboardConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
vout, err := convertDashboardResource(ctx, vin)
if err != nil {
return err
}
// Add the converted resource to the output.
out.Dashboard[key] = vout.AsAny()
// Configure permissions for this resource.
if permissions := convertPermissionsResource(ctx, vin); permissions != nil {
permissions.DashboardId = fmt.Sprintf("${databricks_dashboard.%s.id}", key)
out.Permissions["dashboard_"+key] = permissions
}
return nil
}
func init() {
registerConverter("dashboards", DashboardConverter{})
}

View File

@@ -0,0 +1,7 @@
package tfdyn
import "testing"
func TestConvertDashboard(t *testing.T) {
}
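TestConvertDashboard is left as an empty stub here. Below is a hedged sketch of what it could assert, modeled on how the other tfdyn converters are usually exercised; schema.NewResources and the map[string]any shape of out.Dashboard entries are assumptions, not something this diff establishes.

package tfdyn

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/internal/tf/schema"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestConvertDashboardSketch(t *testing.T) {
	// Hypothetical input using the bundle fields introduced in this commit.
	src := resources.Dashboard{
		DisplayName:    "My Dashboard",
		WarehouseID:    "w123",
		DefinitionPath: "/local/dashboard.lvdash.json",
	}

	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	// schema.NewResources is assumed to return a *schema.Resources with its maps initialized.
	out := schema.NewResources()
	err = DashboardConverter{}.Convert(context.Background(), "my_dashboard", vin, out)
	require.NoError(t, err)

	// The converter injects a Terraform file() interpolation so the local
	// definition file is read at deploy time.
	dashboard, ok := out.Dashboard["my_dashboard"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, `${file("/local/dashboard.lvdash.json")}`, dashboard["serialized_dashboard"])
}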

View File

@@ -39,6 +39,10 @@ var levelsMap = map[string](map[string]string){
CAN_VIEW: "CAN_VIEW",
CAN_RUN: "CAN_QUERY",
},
"dashboards": {
CAN_MANAGE: "CAN_MANAGE",
CAN_VIEW: "CAN_READ",
},
}
type bundlePermissions struct{}

View File

@@ -2,11 +2,14 @@ package acc
import (
"context"
"fmt"
"os"
"testing"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/require"
)
@@ -94,3 +97,30 @@ func (t *WorkspaceT) RunPython(code string) (string, error) {
require.True(t, ok, "unexpected type %T", results.Data)
return output, nil
}
func (t *WorkspaceT) TemporaryWorkspaceDir(name ...string) string {
ctx := context.Background()
me, err := t.W.CurrentUser.Me(ctx)
require.NoError(t, err)
basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName(name...))
t.Logf("Creating %s", basePath)
err = t.W.Workspace.MkdirsByPath(ctx, basePath)
require.NoError(t, err)
// Remove test directory on test completion.
t.Cleanup(func() {
t.Logf("Removing %s", basePath)
err := t.W.Workspace.Delete(ctx, workspace.Delete{
Path: basePath,
Recursive: true,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("Unable to remove temporary workspace directory %s: %#v", basePath, err)
})
return basePath
}

View File

@@ -0,0 +1,110 @@
package test
import (
"encoding/base64"
"testing"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Verify that importing a dashboard through the Workspace API retains the identity of the underlying resource,
// as well as properties exclusively accessible through the dashboards API.
func TestDashboardAssumptions_WorkspaceImport(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
t.Parallel()
dashboardName := "New Dashboard"
dashboardPayload := []byte(`{"pages":[{"name":"2506f97a","displayName":"New Page"}]}`)
warehouseId := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
dir := wt.TemporaryWorkspaceDir("dashboard-assumptions-")
dashboard, err := wt.W.Lakeview.Create(ctx, dashboards.CreateDashboardRequest{
DisplayName: dashboardName,
ParentPath: dir,
SerializedDashboard: string(dashboardPayload),
WarehouseId: warehouseId,
})
require.NoError(t, err)
t.Logf("Dashboard ID (per Lakeview API): %s", dashboard.DashboardId)
// Overwrite the dashboard via the workspace API.
{
err := wt.W.Workspace.Import(ctx, workspace.Import{
Format: workspace.ImportFormatAuto,
Path: dashboard.Path,
Content: base64.StdEncoding.EncodeToString(dashboardPayload),
Overwrite: true,
})
require.NoError(t, err)
}
// Cross-check consistency with the workspace object.
{
obj, err := wt.W.Workspace.GetStatusByPath(ctx, dashboard.Path)
require.NoError(t, err)
// Confirm that the resource ID included in the response is equal to the dashboard ID.
require.Equal(t, dashboard.DashboardId, obj.ResourceId)
t.Logf("Dashboard ID (per workspace object status): %s", obj.ResourceId)
}
// Try to overwrite the dashboard via the Lakeview API (and expect failure).
{
_, err := wt.W.Lakeview.Create(ctx, dashboards.CreateDashboardRequest{
DisplayName: dashboardName,
ParentPath: dir,
SerializedDashboard: string(dashboardPayload),
})
require.ErrorIs(t, err, apierr.ErrResourceAlreadyExists)
}
// Retrieve the dashboard object and confirm that only select fields were updated by the import.
{
obj, err := wt.W.Lakeview.Get(ctx, dashboards.GetDashboardRequest{
DashboardId: dashboard.DashboardId,
})
require.NoError(t, err)
// Convert the dashboard object to a [dyn.Value] to make comparison easier.
previous, err := convert.FromTyped(dashboard, dyn.NilValue)
require.NoError(t, err)
current, err := convert.FromTyped(obj, dyn.NilValue)
require.NoError(t, err)
// Collect updated paths.
var updatedFieldPaths []string
_, err = merge.Override(previous, current, merge.OverrideVisitor{
VisitDelete: func(basePath dyn.Path, left dyn.Value) error {
assert.Fail(t, "unexpected delete operation")
return nil
},
VisitInsert: func(basePath dyn.Path, right dyn.Value) (dyn.Value, error) {
assert.Fail(t, "unexpected insert operation")
return right, nil
},
VisitUpdate: func(basePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) {
updatedFieldPaths = append(updatedFieldPaths, basePath.String())
return right, nil
},
})
require.NoError(t, err)
// Confirm that only the expected fields have been updated.
assert.ElementsMatch(t, []string{
"etag",
"serialized_dashboard",
"update_time",
}, updatedFieldPaths)
}
}