mirror of https://github.com/databricks/cli.git
Add support for AI/BI dashboards (#1743)
## Changes

This change adds support for modeling [AI/BI dashboards][docs] in DABs. An [example bundle configuration][example] is located in the `bundle-examples` repository.

[docs]: https://docs.databricks.com/en/dashboards/index.html#dashboards
[example]: https://github.com/databricks/bundle-examples/tree/main/knowledge_base/dashboard_nyc_taxi

## Tests

* Added unit tests for self-contained parts
* Integration test for e2e dashboard deployment and remote change modification
parent 9c96f006c4
commit 11f75fd320
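For orientation, here is a minimal sketch (not part of the change; names, IDs, and paths are illustrative) of what this commit models: a dashboard resource in the typed bundle configuration wraps the SDK's `CreateDashboardRequest` and adds bundle-specific fields such as `file_path` and `embed_credentials`. The linked `bundle-examples` repository contains a complete configuration.

```go
package main

import (
    "fmt"

    "github.com/databricks/cli/bundle/config/resources"
    "github.com/databricks/databricks-sdk-go/service/dashboards"
)

func main() {
    // A dashboard resource composes the SDK request with bundle-only fields.
    dash := &resources.Dashboard{
        CreateDashboardRequest: &dashboards.CreateDashboardRequest{
            DisplayName: "NYC Taxi Trip Analysis",               // illustrative
            WarehouseId: "abcdef1234567890",                     // illustrative
            ParentPath:  "/Workspace/Users/someone@example.com", // defaulted by ConfigureDashboardDefaults if omitted
        },
        // Local .lvdash.json file holding the dashboard definition;
        // translated to `serialized_dashboard` for Terraform (see the converter below).
        FilePath: "./dashboard.lvdash.json",
        // Defaults to false if not set.
        EmbedCredentials: false,
    }
    fmt.Println(dash.DisplayName)
}
```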
@@ -212,6 +212,15 @@ func (m *applyPresets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
        }
    }

+   // Dashboards: Prefix
+   for key, dashboard := range r.Dashboards {
+       if dashboard == nil || dashboard.CreateDashboardRequest == nil {
+           diags = diags.Extend(diag.Errorf("dashboard %s is not defined", key))
+           continue
+       }
+       dashboard.DisplayName = prefix + dashboard.DisplayName
+   }
+
    return diags
}
@@ -0,0 +1,70 @@
package mutator

import (
    "context"

    "github.com/databricks/cli/bundle"
    "github.com/databricks/cli/libs/diag"
    "github.com/databricks/cli/libs/dyn"
)

type configureDashboardDefaults struct{}

func ConfigureDashboardDefaults() bundle.Mutator {
    return &configureDashboardDefaults{}
}

func (m *configureDashboardDefaults) Name() string {
    return "ConfigureDashboardDefaults"
}

func (m *configureDashboardDefaults) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
    var diags diag.Diagnostics

    pattern := dyn.NewPattern(
        dyn.Key("resources"),
        dyn.Key("dashboards"),
        dyn.AnyKey(),
    )

    // Configure defaults for all dashboards.
    err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
        return dyn.MapByPattern(v, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
            var err error
            v, err = setIfNotExists(v, dyn.NewPath(dyn.Key("parent_path")), dyn.V(b.Config.Workspace.ResourcePath))
            if err != nil {
                return dyn.InvalidValue, err
            }
            v, err = setIfNotExists(v, dyn.NewPath(dyn.Key("embed_credentials")), dyn.V(false))
            if err != nil {
                return dyn.InvalidValue, err
            }
            return v, nil
        })
    })

    diags = diags.Extend(diag.FromErr(err))
    return diags
}

func setIfNotExists(v dyn.Value, path dyn.Path, defaultValue dyn.Value) (dyn.Value, error) {
    // Get the field at the specified path (if set).
    _, err := dyn.GetByPath(v, path)
    switch {
    case dyn.IsNoSuchKeyError(err):
        // OK, we'll set the default value.
        break
    case dyn.IsCannotTraverseNilError(err):
        // Cannot traverse the value, skip it.
        return v, nil
    case err == nil:
        // The field is set, skip it.
        return v, nil
    default:
        // Return the error.
        return v, err
    }

    // Set the field at the specified path.
    return dyn.SetByPath(v, path, defaultValue)
}
@@ -0,0 +1,130 @@
package mutator_test

import (
    "context"
    "testing"

    "github.com/databricks/cli/bundle"
    "github.com/databricks/cli/bundle/config"
    "github.com/databricks/cli/bundle/config/mutator"
    "github.com/databricks/cli/bundle/config/resources"
    "github.com/databricks/cli/bundle/internal/bundletest"
    "github.com/databricks/cli/libs/dyn"
    "github.com/databricks/databricks-sdk-go/service/dashboards"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestConfigureDashboardDefaultsParentPath(t *testing.T) {
    b := &bundle.Bundle{
        Config: config.Root{
            Workspace: config.Workspace{
                ResourcePath: "/foo/bar",
            },
            Resources: config.Resources{
                Dashboards: map[string]*resources.Dashboard{
                    "d1": {
                        // Empty string is skipped.
                        // See below for how it is set.
                        CreateDashboardRequest: &dashboards.CreateDashboardRequest{
                            ParentPath: "",
                        },
                    },
                    "d2": {
                        // Non-empty string is skipped.
                        CreateDashboardRequest: &dashboards.CreateDashboardRequest{
                            ParentPath: "already-set",
                        },
                    },
                    "d3": {
                        // No parent path set.
                    },
                    "d4": nil,
                },
            },
        },
    }

    // We can't set an empty string in the typed configuration.
    // Do it on the dyn.Value directly.
    bundletest.Mutate(t, b, func(v dyn.Value) (dyn.Value, error) {
        return dyn.Set(v, "resources.dashboards.d1.parent_path", dyn.V(""))
    })

    diags := bundle.Apply(context.Background(), b, mutator.ConfigureDashboardDefaults())
    require.NoError(t, diags.Error())

    var v dyn.Value
    var err error

    // Set to empty string; unchanged.
    v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d1.parent_path")
    if assert.NoError(t, err) {
        assert.Equal(t, "", v.MustString())
    }

    // Set to "already-set"; unchanged.
    v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d2.parent_path")
    if assert.NoError(t, err) {
        assert.Equal(t, "already-set", v.MustString())
    }

    // Not set; now set to the workspace resource path.
    v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d3.parent_path")
    if assert.NoError(t, err) {
        assert.Equal(t, "/foo/bar", v.MustString())
    }

    // No valid dashboard; no change.
    _, err = dyn.Get(b.Config.Value(), "resources.dashboards.d4.parent_path")
    assert.True(t, dyn.IsCannotTraverseNilError(err))
}

func TestConfigureDashboardDefaultsEmbedCredentials(t *testing.T) {
    b := &bundle.Bundle{
        Config: config.Root{
            Resources: config.Resources{
                Dashboards: map[string]*resources.Dashboard{
                    "d1": {
                        EmbedCredentials: true,
                    },
                    "d2": {
                        EmbedCredentials: false,
                    },
                    "d3": {
                        // No parent path set.
                    },
                    "d4": nil,
                },
            },
        },
    }

    diags := bundle.Apply(context.Background(), b, mutator.ConfigureDashboardDefaults())
    require.NoError(t, diags.Error())

    var v dyn.Value
    var err error

    // Set to true; still true.
    v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d1.embed_credentials")
    if assert.NoError(t, err) {
        assert.Equal(t, true, v.MustBool())
    }

    // Set to false; still false.
    v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d2.embed_credentials")
    if assert.NoError(t, err) {
        assert.Equal(t, false, v.MustBool())
    }

    // Not set; now false.
    v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d3.embed_credentials")
    if assert.NoError(t, err) {
        assert.Equal(t, false, v.MustBool())
    }

    // No valid dashboard; no change.
    _, err = dyn.Get(b.Config.Value(), "resources.dashboards.d4.embed_credentials")
    assert.True(t, dyn.IsCannotTraverseNilError(err))
}
@@ -8,6 +8,7 @@ import (
    "github.com/databricks/cli/bundle/config/resources"
    "github.com/databricks/databricks-sdk-go/service/catalog"
    "github.com/databricks/databricks-sdk-go/service/compute"
+   "github.com/databricks/databricks-sdk-go/service/dashboards"
    "github.com/databricks/databricks-sdk-go/service/jobs"
    "github.com/databricks/databricks-sdk-go/service/ml"
    "github.com/databricks/databricks-sdk-go/service/pipelines"
@@ -85,6 +86,14 @@ func TestInitializeURLs(t *testing.T) {
                },
            },
        },
+       Dashboards: map[string]*resources.Dashboard{
+           "dashboard1": {
+               ID: "01ef8d56871e1d50ae30ce7375e42478",
+               CreateDashboardRequest: &dashboards.CreateDashboardRequest{
+                   DisplayName: "My special dashboard",
+               },
+           },
+       },
        },
    },
}
@@ -99,6 +108,7 @@ func TestInitializeURLs(t *testing.T) {
        "qualityMonitor1": "https://mycompany.databricks.com/explore/data/catalog/schema/qualityMonitor1?o=123456",
        "schema1":         "https://mycompany.databricks.com/explore/data/catalog/schema?o=123456",
        "cluster1":        "https://mycompany.databricks.com/compute/clusters/1017-103929-vlr7jzcf?o=123456",
+       "dashboard1":      "https://mycompany.databricks.com/dashboardsv3/01ef8d56871e1d50ae30ce7375e42478/published?o=123456",
    }

    initializeForWorkspace(b, "123456", "https://mycompany.databricks.com/")
@@ -14,6 +14,7 @@ import (
    sdkconfig "github.com/databricks/databricks-sdk-go/config"
    "github.com/databricks/databricks-sdk-go/service/catalog"
    "github.com/databricks/databricks-sdk-go/service/compute"
+   "github.com/databricks/databricks-sdk-go/service/dashboards"
    "github.com/databricks/databricks-sdk-go/service/iam"
    "github.com/databricks/databricks-sdk-go/service/jobs"
    "github.com/databricks/databricks-sdk-go/service/ml"
@@ -123,6 +124,13 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
            Clusters: map[string]*resources.Cluster{
                "cluster1": {ClusterSpec: &compute.ClusterSpec{ClusterName: "cluster1", SparkVersion: "13.2.x", NumWorkers: 1}},
            },
+           Dashboards: map[string]*resources.Dashboard{
+               "dashboard1": {
+                   CreateDashboardRequest: &dashboards.CreateDashboardRequest{
+                       DisplayName: "dashboard1",
+                   },
+               },
+           },
        },
    },
    // Use AWS implementation for testing.
@@ -184,6 +192,9 @@ func TestProcessTargetModeDevelopment(t *testing.T) {

    // Clusters
    assert.Equal(t, "[dev lennart] cluster1", b.Config.Resources.Clusters["cluster1"].ClusterName)
+
+   // Dashboards
+   assert.Equal(t, "[dev lennart] dashboard1", b.Config.Resources.Dashboards["dashboard1"].DisplayName)
}

func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) {
@@ -110,6 +110,16 @@ func validateRunAs(b *bundle.Bundle) diag.Diagnostics {
        ))
    }

+   // Dashboards do not support run_as in the API.
+   if len(b.Config.Resources.Dashboards) > 0 {
+       diags = diags.Extend(reportRunAsNotSupported(
+           "dashboards",
+           b.Config.GetLocation("resources.dashboards"),
+           b.Config.Workspace.CurrentUser.UserName,
+           identity,
+       ))
+   }
+
    return diags
}
@@ -33,6 +33,7 @@ func allResourceTypes(t *testing.T) []string {
    // also update this check when adding a new resource
    require.Equal(t, []string{
        "clusters",
+       "dashboards",
        "experiments",
        "jobs",
        "model_serving_endpoints",
@@ -188,6 +189,7 @@ func TestRunAsErrorForUnsupportedResources(t *testing.T) {
            Config: *r,
        }
        diags := bundle.Apply(context.Background(), b, SetRunAs())
+       require.Error(t, diags.Error())
        assert.Contains(t, diags.Error().Error(), "do not support a setting a run_as user that is different from the owner.\n"+
            "Current identity: alice. Run as identity: bob.\n"+
            "See https://docs.databricks.com/dev-tools/bundles/run-as.html to learn more about the run_as property.", rt)
@@ -162,6 +162,20 @@ func (t *translateContext) translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) {
    return localRelPath, nil
}

+func (t *translateContext) retainLocalAbsoluteFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
+   info, err := t.b.SyncRoot.Stat(localRelPath)
+   if errors.Is(err, fs.ErrNotExist) {
+       return "", fmt.Errorf("file %s not found", literal)
+   }
+   if err != nil {
+       return "", fmt.Errorf("unable to determine if %s is a file: %w", localFullPath, err)
+   }
+   if info.IsDir() {
+       return "", fmt.Errorf("expected %s to be a file but found a directory", literal)
+   }
+   return localFullPath, nil
+}
+
func (t *translateContext) translateNoOpWithPrefix(literal, localFullPath, localRelPath, remotePath string) (string, error) {
    if !strings.HasPrefix(localRelPath, ".") {
        localRelPath = "." + string(filepath.Separator) + localRelPath
@@ -215,6 +229,7 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
        t.applyJobTranslations,
        t.applyPipelineTranslations,
        t.applyArtifactTranslations,
+       t.applyDashboardTranslations,
    } {
        v, err = fn(v)
        if err != nil {
@@ -0,0 +1,28 @@
package mutator

import (
    "fmt"

    "github.com/databricks/cli/libs/dyn"
)

func (t *translateContext) applyDashboardTranslations(v dyn.Value) (dyn.Value, error) {
    // Convert the `file_path` field to a local absolute path.
    // We load the file at this path and use its contents for the dashboard contents.
    pattern := dyn.NewPattern(
        dyn.Key("resources"),
        dyn.Key("dashboards"),
        dyn.AnyKey(),
        dyn.Key("file_path"),
    )

    return dyn.MapByPattern(v, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
        key := p[2].Key()
        dir, err := v.Location().Directory()
        if err != nil {
            return dyn.InvalidValue, fmt.Errorf("unable to determine directory for dashboard %s: %w", key, err)
        }

        return t.rewriteRelativeTo(p, v, t.retainLocalAbsoluteFilePath, dir, "")
    })
}
@@ -21,6 +21,7 @@ type Resources struct {
    QualityMonitors map[string]*resources.QualityMonitor `json:"quality_monitors,omitempty"`
    Schemas         map[string]*resources.Schema         `json:"schemas,omitempty"`
    Clusters        map[string]*resources.Cluster        `json:"clusters,omitempty"`
+   Dashboards      map[string]*resources.Dashboard      `json:"dashboards,omitempty"`
}

type ConfigResource interface {
@@ -77,6 +78,7 @@ func (r *Resources) AllResources() []ResourceGroup {
        collectResourceMap(descriptions["quality_monitors"], r.QualityMonitors),
        collectResourceMap(descriptions["schemas"], r.Schemas),
        collectResourceMap(descriptions["clusters"], r.Clusters),
+       collectResourceMap(descriptions["dashboards"], r.Dashboards),
    }
}

@@ -175,5 +177,11 @@ func SupportedResources() map[string]ResourceDescription {
            SingularTitle: "Cluster",
            PluralTitle:   "Clusters",
        },
+       "dashboards": {
+           SingularName:  "dashboard",
+           PluralName:    "dashboards",
+           SingularTitle: "Dashboard",
+           PluralTitle:   "Dashboards",
+       },
    }
}
@@ -0,0 +1,81 @@
package resources

import (
    "context"
    "fmt"
    "net/url"

    "github.com/databricks/cli/libs/log"
    "github.com/databricks/databricks-sdk-go"
    "github.com/databricks/databricks-sdk-go/marshal"
    "github.com/databricks/databricks-sdk-go/service/dashboards"
)

type Dashboard struct {
    ID             string         `json:"id,omitempty" bundle:"readonly"`
    Permissions    []Permission   `json:"permissions,omitempty"`
    ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
    URL            string         `json:"url,omitempty" bundle:"internal"`

    *dashboards.CreateDashboardRequest

    // =========================
    // === Additional fields ===
    // =========================

    // SerializedDashboard holds the contents of the dashboard in serialized JSON form.
    // We override the field's type from the SDK struct here to allow for inlining as YAML.
    // If the value is a string, it is used as is.
    // If it is not a string, its contents is marshalled as JSON.
    SerializedDashboard any `json:"serialized_dashboard,omitempty"`

    // EmbedCredentials is a flag to indicate if the publisher's credentials should
    // be embedded in the published dashboard. These embedded credentials will be used
    // to execute the published dashboard's queries.
    //
    // Defaults to false if not set.
    EmbedCredentials bool `json:"embed_credentials,omitempty"`

    // FilePath points to the local `.lvdash.json` file containing the dashboard definition.
    FilePath string `json:"file_path,omitempty"`
}

func (r *Dashboard) UnmarshalJSON(b []byte) error {
    return marshal.Unmarshal(b, r)
}

func (r Dashboard) MarshalJSON() ([]byte, error) {
    return marshal.Marshal(r)
}

func (*Dashboard) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) {
    _, err := w.Lakeview.Get(ctx, dashboards.GetDashboardRequest{
        DashboardId: id,
    })
    if err != nil {
        log.Debugf(ctx, "dashboard %s does not exist", id)
        return false, err
    }
    return true, nil
}

func (*Dashboard) TerraformResourceName() string {
    return "databricks_dashboard"
}

func (r *Dashboard) InitializeURL(baseURL url.URL) {
    if r.ID == "" {
        return
    }

    baseURL.Path = fmt.Sprintf("dashboardsv3/%s/published", r.ID)
    r.URL = baseURL.String()
}

func (r *Dashboard) GetName() string {
    return r.DisplayName
}

func (r *Dashboard) GetURL() string {
    return r.URL
}
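As a quick illustration of the `InitializeURL` method above, a sketch (not part of the change; the workspace URL and dashboard ID are placeholders) of how the published-dashboard URL is derived from the workspace host and the dashboard ID:

```go
package main

import (
    "fmt"
    "net/url"

    "github.com/databricks/cli/bundle/config/resources"
)

func main() {
    // Placeholder workspace URL and dashboard ID.
    base, _ := url.Parse("https://mycompany.databricks.com/?o=123456")
    d := &resources.Dashboard{ID: "01ef8d56871e1d50ae30ce7375e42478"}

    // Sets d.URL to .../dashboardsv3/<id>/published, preserving the query string.
    d.InitializeURL(*base)
    fmt.Println(d.GetURL())
    // https://mycompany.databricks.com/dashboardsv3/01ef8d56871e1d50ae30ce7375e42478/published?o=123456
}
```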
@@ -0,0 +1,117 @@
package terraform

import (
    "context"
    "fmt"

    "github.com/databricks/cli/bundle"
    "github.com/databricks/cli/libs/diag"
    "github.com/databricks/cli/libs/dyn"
    tfjson "github.com/hashicorp/terraform-json"
)

type dashboardState struct {
    Name string
    ID   string
    ETag string
}

func collectDashboardsFromState(ctx context.Context, b *bundle.Bundle) ([]dashboardState, error) {
    state, err := ParseResourcesState(ctx, b)
    if err != nil && state == nil {
        return nil, err
    }

    var dashboards []dashboardState
    for _, resource := range state.Resources {
        if resource.Mode != tfjson.ManagedResourceMode {
            continue
        }
        for _, instance := range resource.Instances {
            switch resource.Type {
            case "databricks_dashboard":
                dashboards = append(dashboards, dashboardState{
                    Name: resource.Name,
                    ID:   instance.Attributes.ID,
                    ETag: instance.Attributes.ETag,
                })
            }
        }
    }

    return dashboards, nil
}

type checkDashboardsModifiedRemotely struct {
}

func (l *checkDashboardsModifiedRemotely) Name() string {
    return "CheckDashboardsModifiedRemotely"
}

func (l *checkDashboardsModifiedRemotely) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
    // This mutator is relevant only if the bundle includes dashboards.
    if len(b.Config.Resources.Dashboards) == 0 {
        return nil
    }

    // If the user has forced the deployment, skip this check.
    if b.Config.Bundle.Force {
        return nil
    }

    dashboards, err := collectDashboardsFromState(ctx, b)
    if err != nil {
        return diag.FromErr(err)
    }

    var diags diag.Diagnostics
    for _, dashboard := range dashboards {
        // Skip dashboards that are not defined in the bundle.
        // These will be destroyed upon deployment.
        if _, ok := b.Config.Resources.Dashboards[dashboard.Name]; !ok {
            continue
        }

        path := dyn.MustPathFromString(fmt.Sprintf("resources.dashboards.%s", dashboard.Name))
        loc := b.Config.GetLocation(path.String())
        actual, err := b.WorkspaceClient().Lakeview.GetByDashboardId(ctx, dashboard.ID)
        if err != nil {
            diags = diags.Append(diag.Diagnostic{
                Severity:  diag.Error,
                Summary:   fmt.Sprintf("failed to get dashboard %q", dashboard.Name),
                Detail:    err.Error(),
                Paths:     []dyn.Path{path},
                Locations: []dyn.Location{loc},
            })
            continue
        }

        // If the ETag is the same, the dashboard has not been modified.
        if actual.Etag == dashboard.ETag {
            continue
        }

        diags = diags.Append(diag.Diagnostic{
            Severity: diag.Error,
            Summary:  fmt.Sprintf("dashboard %q has been modified remotely", dashboard.Name),
            Detail: "" +
                "This dashboard has been modified remotely since the last bundle deployment.\n" +
                "These modifications are untracked and will be overwritten on deploy.\n" +
                "\n" +
                "Make sure that the local dashboard definition matches what you intend to deploy\n" +
                "before proceeding with the deployment.\n" +
                "\n" +
                "Run `databricks bundle deploy --force` to bypass this error." +
                "",
            Paths:     []dyn.Path{path},
            Locations: []dyn.Location{loc},
        })
    }

    return diags
}

func CheckDashboardsModifiedRemotely() *checkDashboardsModifiedRemotely {
    return &checkDashboardsModifiedRemotely{}
}
@@ -0,0 +1,191 @@
package terraform

import (
    "context"
    "fmt"
    "path/filepath"
    "testing"

    "github.com/databricks/cli/bundle"
    "github.com/databricks/cli/bundle/config"
    "github.com/databricks/cli/bundle/config/resources"
    "github.com/databricks/cli/internal/testutil"
    "github.com/databricks/cli/libs/diag"
    "github.com/databricks/databricks-sdk-go/experimental/mocks"
    "github.com/databricks/databricks-sdk-go/service/dashboards"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/mock"
    "github.com/stretchr/testify/require"
)

func mockDashboardBundle(t *testing.T) *bundle.Bundle {
    dir := t.TempDir()
    b := &bundle.Bundle{
        BundleRootPath: dir,
        Config: config.Root{
            Bundle: config.Bundle{
                Target: "test",
            },
            Resources: config.Resources{
                Dashboards: map[string]*resources.Dashboard{
                    "dash1": {
                        CreateDashboardRequest: &dashboards.CreateDashboardRequest{
                            DisplayName: "My Special Dashboard",
                        },
                    },
                },
            },
        },
    }
    return b
}

func TestCheckDashboardsModifiedRemotely_NoDashboards(t *testing.T) {
    dir := t.TempDir()
    b := &bundle.Bundle{
        BundleRootPath: dir,
        Config: config.Root{
            Bundle: config.Bundle{
                Target: "test",
            },
            Resources: config.Resources{},
        },
    }

    diags := bundle.Apply(context.Background(), b, CheckDashboardsModifiedRemotely())
    assert.Empty(t, diags)
}

func TestCheckDashboardsModifiedRemotely_FirstDeployment(t *testing.T) {
    b := mockDashboardBundle(t)
    diags := bundle.Apply(context.Background(), b, CheckDashboardsModifiedRemotely())
    assert.Empty(t, diags)
}

func TestCheckDashboardsModifiedRemotely_ExistingStateNoChange(t *testing.T) {
    ctx := context.Background()

    b := mockDashboardBundle(t)
    writeFakeDashboardState(t, ctx, b)

    // Mock the call to the API.
    m := mocks.NewMockWorkspaceClient(t)
    dashboardsAPI := m.GetMockLakeviewAPI()
    dashboardsAPI.EXPECT().
        GetByDashboardId(mock.Anything, "id1").
        Return(&dashboards.Dashboard{
            DisplayName: "My Special Dashboard",
            Etag:        "1000",
        }, nil).
        Once()
    b.SetWorkpaceClient(m.WorkspaceClient)

    // No changes, so no diags.
    diags := bundle.Apply(ctx, b, CheckDashboardsModifiedRemotely())
    assert.Empty(t, diags)
}

func TestCheckDashboardsModifiedRemotely_ExistingStateChange(t *testing.T) {
    ctx := context.Background()

    b := mockDashboardBundle(t)
    writeFakeDashboardState(t, ctx, b)

    // Mock the call to the API.
    m := mocks.NewMockWorkspaceClient(t)
    dashboardsAPI := m.GetMockLakeviewAPI()
    dashboardsAPI.EXPECT().
        GetByDashboardId(mock.Anything, "id1").
        Return(&dashboards.Dashboard{
            DisplayName: "My Special Dashboard",
            Etag:        "1234",
        }, nil).
        Once()
    b.SetWorkpaceClient(m.WorkspaceClient)

    // The dashboard has changed, so expect an error.
    diags := bundle.Apply(ctx, b, CheckDashboardsModifiedRemotely())
    if assert.Len(t, diags, 1) {
        assert.Equal(t, diag.Error, diags[0].Severity)
        assert.Equal(t, `dashboard "dash1" has been modified remotely`, diags[0].Summary)
    }
}

func TestCheckDashboardsModifiedRemotely_ExistingStateFailureToGet(t *testing.T) {
    ctx := context.Background()

    b := mockDashboardBundle(t)
    writeFakeDashboardState(t, ctx, b)

    // Mock the call to the API.
    m := mocks.NewMockWorkspaceClient(t)
    dashboardsAPI := m.GetMockLakeviewAPI()
    dashboardsAPI.EXPECT().
        GetByDashboardId(mock.Anything, "id1").
        Return(nil, fmt.Errorf("failure")).
        Once()
    b.SetWorkpaceClient(m.WorkspaceClient)

    // Unable to get the dashboard, so expect an error.
    diags := bundle.Apply(ctx, b, CheckDashboardsModifiedRemotely())
    if assert.Len(t, diags, 1) {
        assert.Equal(t, diag.Error, diags[0].Severity)
        assert.Equal(t, `failed to get dashboard "dash1"`, diags[0].Summary)
    }
}

func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle) {
    tfDir, err := Dir(ctx, b)
    require.NoError(t, err)

    // Write fake state file.
    testutil.WriteFile(t, `
    {
        "version": 4,
        "terraform_version": "1.5.5",
        "resources": [
            {
                "mode": "managed",
                "type": "databricks_dashboard",
                "name": "dash1",
                "instances": [
                    {
                        "schema_version": 0,
                        "attributes": {
                            "etag": "1000",
                            "id": "id1"
                        }
                    }
                ]
            },
            {
                "mode": "managed",
                "type": "databricks_job",
                "name": "job",
                "instances": [
                    {
                        "schema_version": 0,
                        "attributes": {
                            "id": "1234"
                        }
                    }
                ]
            },
            {
                "mode": "managed",
                "type": "databricks_dashboard",
                "name": "dash2",
                "instances": [
                    {
                        "schema_version": 0,
                        "attributes": {
                            "etag": "1001",
                            "id": "id2"
                        }
                    }
                ]
            }
        ]
    }
    `, filepath.Join(tfDir, TerraformStateFileName))
}
@@ -176,6 +176,16 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
                }
                cur.ID = instance.Attributes.ID
                config.Resources.Clusters[resource.Name] = cur
+           case "databricks_dashboard":
+               if config.Resources.Dashboards == nil {
+                   config.Resources.Dashboards = make(map[string]*resources.Dashboard)
+               }
+               cur := config.Resources.Dashboards[resource.Name]
+               if cur == nil {
+                   cur = &resources.Dashboard{ModifiedStatus: resources.ModifiedStatusDeleted}
+               }
+               cur.ID = instance.Attributes.ID
+               config.Resources.Dashboards[resource.Name] = cur
            case "databricks_permissions":
            case "databricks_grants":
                // Ignore; no need to pull these back into the configuration.
@@ -230,6 +240,11 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
            src.ModifiedStatus = resources.ModifiedStatusCreated
        }
    }
+   for _, src := range config.Resources.Dashboards {
+       if src.ModifiedStatus == "" && src.ID == "" {
+           src.ModifiedStatus = resources.ModifiedStatusCreated
+       }
+   }

    return nil
}
@@ -12,6 +12,7 @@ import (
    "github.com/databricks/cli/libs/dyn/convert"
    "github.com/databricks/databricks-sdk-go/service/catalog"
    "github.com/databricks/databricks-sdk-go/service/compute"
+   "github.com/databricks/databricks-sdk-go/service/dashboards"
    "github.com/databricks/databricks-sdk-go/service/jobs"
    "github.com/databricks/databricks-sdk-go/service/ml"
    "github.com/databricks/databricks-sdk-go/service/pipelines"
@@ -677,6 +678,14 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
                    {Attributes: stateInstanceAttributes{ID: "1"}},
                },
            },
+           {
+               Type: "databricks_dashboard",
+               Mode: "managed",
+               Name: "test_dashboard",
+               Instances: []stateResourceInstance{
+                   {Attributes: stateInstanceAttributes{ID: "1"}},
+               },
+           },
        },
    }
    err := TerraformToBundle(&tfState, &config)
@@ -709,6 +718,9 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
    assert.Equal(t, "1", config.Resources.Clusters["test_cluster"].ID)
    assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Clusters["test_cluster"].ModifiedStatus)

+   assert.Equal(t, "1", config.Resources.Dashboards["test_dashboard"].ID)
+   assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
+
    AssertFullResourceCoverage(t, &config)
}

@@ -778,6 +790,13 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
                },
            },
        },
+       Dashboards: map[string]*resources.Dashboard{
+           "test_dashboard": {
+               CreateDashboardRequest: &dashboards.CreateDashboardRequest{
+                   DisplayName: "test_dashboard",
+               },
+           },
+       },
        },
    }
    var tfState = resourcesState{
@@ -813,6 +832,9 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
    assert.Equal(t, "", config.Resources.Clusters["test_cluster"].ID)
    assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Clusters["test_cluster"].ModifiedStatus)

+   assert.Equal(t, "", config.Resources.Dashboards["test_dashboard"].ID)
+   assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
+
    AssertFullResourceCoverage(t, &config)
}

@@ -927,6 +949,18 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
                },
            },
        },
+       Dashboards: map[string]*resources.Dashboard{
+           "test_dashboard": {
+               CreateDashboardRequest: &dashboards.CreateDashboardRequest{
+                   DisplayName: "test_dashboard",
+               },
+           },
+           "test_dashboard_new": {
+               CreateDashboardRequest: &dashboards.CreateDashboardRequest{
+                   DisplayName: "test_dashboard_new",
+               },
+           },
+       },
        },
    }
    var tfState = resourcesState{
@@ -1075,6 +1109,22 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
                    {Attributes: stateInstanceAttributes{ID: "2"}},
                },
            },
+           {
+               Type: "databricks_dashboard",
+               Mode: "managed",
+               Name: "test_dashboard",
+               Instances: []stateResourceInstance{
+                   {Attributes: stateInstanceAttributes{ID: "1"}},
+               },
+           },
+           {
+               Type: "databricks_dashboard",
+               Mode: "managed",
+               Name: "test_dashboard_old",
+               Instances: []stateResourceInstance{
+                   {Attributes: stateInstanceAttributes{ID: "2"}},
+               },
+           },
        },
    }
    err := TerraformToBundle(&tfState, &config)
@@ -1143,6 +1193,13 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
    assert.Equal(t, "", config.Resources.Clusters["test_cluster_new"].ID)
    assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Clusters["test_cluster_new"].ModifiedStatus)

+   assert.Equal(t, "1", config.Resources.Dashboards["test_dashboard"].ID)
+   assert.Equal(t, "", config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
+   assert.Equal(t, "2", config.Resources.Dashboards["test_dashboard_old"].ID)
+   assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Dashboards["test_dashboard_old"].ModifiedStatus)
+   assert.Equal(t, "", config.Resources.Dashboards["test_dashboard_new"].ID)
+   assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard_new"].ModifiedStatus)
+
    AssertFullResourceCoverage(t, &config)
}

@@ -60,6 +60,8 @@ func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
            path = dyn.NewPath(dyn.Key("databricks_schema")).Append(path[2:]...)
        case dyn.Key("clusters"):
            path = dyn.NewPath(dyn.Key("databricks_cluster")).Append(path[2:]...)
+       case dyn.Key("dashboards"):
+           path = dyn.NewPath(dyn.Key("databricks_dashboard")).Append(path[2:]...)
        default:
            // Trigger "key not found" for unknown resource types.
            return dyn.GetByPath(root, path)
@@ -32,6 +32,7 @@ func TestInterpolate(t *testing.T) {
                "other_registered_model": "${resources.registered_models.other_registered_model.id}",
                "other_schema":           "${resources.schemas.other_schema.id}",
                "other_cluster":          "${resources.clusters.other_cluster.id}",
+               "other_dashboard":        "${resources.dashboards.other_dashboard.id}",
            },
            Tasks: []jobs.Task{
                {
@@ -69,6 +70,7 @@ func TestInterpolate(t *testing.T) {
    assert.Equal(t, "${databricks_registered_model.other_registered_model.id}", j.Tags["other_registered_model"])
    assert.Equal(t, "${databricks_schema.other_schema.id}", j.Tags["other_schema"])
    assert.Equal(t, "${databricks_cluster.other_cluster.id}", j.Tags["other_cluster"])
+   assert.Equal(t, "${databricks_dashboard.other_dashboard.id}", j.Tags["other_dashboard"])

    m := b.Config.Resources.Models["my_model"]
    assert.Equal(t, "my_model", m.Model.Name)
@@ -0,0 +1,109 @@
package tfdyn

import (
    "context"
    "encoding/json"
    "fmt"

    "github.com/databricks/cli/bundle/internal/tf/schema"
    "github.com/databricks/cli/libs/dyn"
    "github.com/databricks/cli/libs/dyn/convert"
    "github.com/databricks/cli/libs/log"
)

const (
    filePathFieldName            = "file_path"
    serializedDashboardFieldName = "serialized_dashboard"
)

// Marshal "serialized_dashboard" as JSON if it is set in the input but not in the output.
func marshalSerializedDashboard(vin dyn.Value, vout dyn.Value) (dyn.Value, error) {
    // Skip if the "serialized_dashboard" field is already set.
    if v := vout.Get(serializedDashboardFieldName); v.IsValid() {
        return vout, nil
    }

    // Skip if the "serialized_dashboard" field on the input is not set.
    v := vin.Get(serializedDashboardFieldName)
    if !v.IsValid() {
        return vout, nil
    }

    // Marshal the "serialized_dashboard" field as JSON.
    data, err := json.Marshal(v.AsAny())
    if err != nil {
        return dyn.InvalidValue, fmt.Errorf("failed to marshal serialized_dashboard: %w", err)
    }

    // Set the "serialized_dashboard" field on the output.
    return dyn.Set(vout, serializedDashboardFieldName, dyn.V(string(data)))
}

func convertDashboardResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
    var err error

    // Normalize the output value to the target schema.
    vout, diags := convert.Normalize(schema.ResourceDashboard{}, vin)
    for _, diag := range diags {
        log.Debugf(ctx, "dashboard normalization diagnostic: %s", diag.Summary)
    }

    // Include "serialized_dashboard" field if "file_path" is set.
    // Note: the Terraform resource supports "file_path" natively, but its
    // change detection mechanism doesn't work as expected at the time of writing (Sep 30).
    if path, ok := vout.Get(filePathFieldName).AsString(); ok {
        vout, err = dyn.Set(vout, serializedDashboardFieldName, dyn.V(fmt.Sprintf("${file(%q)}", path)))
        if err != nil {
            return dyn.InvalidValue, fmt.Errorf("failed to set serialized_dashboard: %w", err)
        }
        // Drop the "file_path" field. It is mutually exclusive with "serialized_dashboard".
        vout, err = dyn.Walk(vout, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
            switch len(p) {
            case 0:
                return v, nil
            case 1:
                if p[0] == dyn.Key(filePathFieldName) {
                    return v, dyn.ErrDrop
                }
            }

            // Skip everything else.
            return v, dyn.ErrSkip
        })
        if err != nil {
            return dyn.InvalidValue, fmt.Errorf("failed to drop file_path: %w", err)
        }
    }

    // Marshal "serialized_dashboard" as JSON if it is set in the input but not in the output.
    vout, err = marshalSerializedDashboard(vin, vout)
    if err != nil {
        return dyn.InvalidValue, err
    }

    return vout, nil
}

type dashboardConverter struct{}

func (dashboardConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
    vout, err := convertDashboardResource(ctx, vin)
    if err != nil {
        return err
    }

    // Add the converted resource to the output.
    out.Dashboard[key] = vout.AsAny()

    // Configure permissions for this resource.
    if permissions := convertPermissionsResource(ctx, vin); permissions != nil {
        permissions.DashboardId = fmt.Sprintf("${databricks_dashboard.%s.id}", key)
        out.Permissions["dashboard_"+key] = permissions
    }

    return nil
}

func init() {
    registerConverter("dashboards", dashboardConverter{})
}
@@ -0,0 +1,153 @@
package tfdyn

import (
    "context"
    "testing"

    "github.com/databricks/cli/bundle/config/resources"
    "github.com/databricks/cli/bundle/internal/tf/schema"
    "github.com/databricks/cli/libs/dyn"
    "github.com/databricks/cli/libs/dyn/convert"
    "github.com/databricks/databricks-sdk-go/service/dashboards"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestConvertDashboard(t *testing.T) {
    var src = resources.Dashboard{
        CreateDashboardRequest: &dashboards.CreateDashboardRequest{
            DisplayName: "my dashboard",
            WarehouseId: "f00dcafe",
            ParentPath:  "/some/path",
        },

        EmbedCredentials: true,

        Permissions: []resources.Permission{
            {
                Level:    "CAN_VIEW",
                UserName: "jane@doe.com",
            },
        },
    }

    vin, err := convert.FromTyped(src, dyn.NilValue)
    require.NoError(t, err)

    ctx := context.Background()
    out := schema.NewResources()
    err = dashboardConverter{}.Convert(ctx, "my_dashboard", vin, out)
    require.NoError(t, err)

    // Assert equality on the dashboard
    assert.Equal(t, map[string]any{
        "display_name":      "my dashboard",
        "warehouse_id":      "f00dcafe",
        "parent_path":       "/some/path",
        "embed_credentials": true,
    }, out.Dashboard["my_dashboard"])

    // Assert equality on the permissions
    assert.Equal(t, &schema.ResourcePermissions{
        DashboardId: "${databricks_dashboard.my_dashboard.id}",
        AccessControl: []schema.ResourcePermissionsAccessControl{
            {
                PermissionLevel: "CAN_VIEW",
                UserName:        "jane@doe.com",
            },
        },
    }, out.Permissions["dashboard_my_dashboard"])
}

func TestConvertDashboardFilePath(t *testing.T) {
    var src = resources.Dashboard{
        FilePath: "some/path",
    }

    vin, err := convert.FromTyped(src, dyn.NilValue)
    require.NoError(t, err)

    ctx := context.Background()
    out := schema.NewResources()
    err = dashboardConverter{}.Convert(ctx, "my_dashboard", vin, out)
    require.NoError(t, err)

    // Assert that the "serialized_dashboard" is included.
    assert.Subset(t, out.Dashboard["my_dashboard"], map[string]any{
        "serialized_dashboard": "${file(\"some/path\")}",
    })

    // Assert that the "file_path" doesn't carry over.
    assert.NotSubset(t, out.Dashboard["my_dashboard"], map[string]any{
        "file_path": "some/path",
    })
}

func TestConvertDashboardFilePathQuoted(t *testing.T) {
    var src = resources.Dashboard{
        FilePath: `C:\foo\bar\baz\dashboard.lvdash.json`,
    }

    vin, err := convert.FromTyped(src, dyn.NilValue)
    require.NoError(t, err)

    ctx := context.Background()
    out := schema.NewResources()
    err = dashboardConverter{}.Convert(ctx, "my_dashboard", vin, out)
    require.NoError(t, err)

    // Assert that the "serialized_dashboard" is included.
    assert.Subset(t, out.Dashboard["my_dashboard"], map[string]any{
        "serialized_dashboard": `${file("C:\\foo\\bar\\baz\\dashboard.lvdash.json")}`,
    })

    // Assert that the "file_path" doesn't carry over.
    assert.NotSubset(t, out.Dashboard["my_dashboard"], map[string]any{
        "file_path": `C:\foo\bar\baz\dashboard.lvdash.json`,
    })
}

func TestConvertDashboardSerializedDashboardString(t *testing.T) {
    var src = resources.Dashboard{
        SerializedDashboard: `{ "json": true }`,
    }

    vin, err := convert.FromTyped(src, dyn.NilValue)
    require.NoError(t, err)

    ctx := context.Background()
    out := schema.NewResources()
    err = dashboardConverter{}.Convert(ctx, "my_dashboard", vin, out)
    require.NoError(t, err)

    // Assert that the "serialized_dashboard" is included.
    assert.Subset(t, out.Dashboard["my_dashboard"], map[string]any{
        "serialized_dashboard": `{ "json": true }`,
    })
}

func TestConvertDashboardSerializedDashboardAny(t *testing.T) {
    var src = resources.Dashboard{
        SerializedDashboard: map[string]any{
            "pages": []map[string]any{
                {
                    "displayName": "New Page",
                    "layout":      []map[string]any{},
                },
            },
        },
    }

    vin, err := convert.FromTyped(src, dyn.NilValue)
    require.NoError(t, err)

    ctx := context.Background()
    out := schema.NewResources()
    err = dashboardConverter{}.Convert(ctx, "my_dashboard", vin, out)
    require.NoError(t, err)

    // Assert that the "serialized_dashboard" is included.
    assert.Subset(t, out.Dashboard["my_dashboard"], map[string]any{
        "serialized_dashboard": `{"pages":[{"displayName":"New Page","layout":[]}]}`,
    })
}
@@ -13,7 +13,7 @@ import (

// Partial representation of the Terraform state file format.
// We are only interested global version and serial numbers,
-// plus resource types, names, modes, and ids.
+// plus resource types, names, modes, IDs, and ETags (for dashboards).
type resourcesState struct {
    Version   int             `json:"version"`
    Resources []stateResource `json:"resources"`
@@ -33,7 +33,8 @@ type stateResourceInstance struct {
}

type stateInstanceAttributes struct {
    ID   string `json:"id"`
+   ETag string `json:"etag,omitempty"`
}

func ParseResourcesState(ctx context.Context, b *bundle.Bundle) (*resourcesState, error) {
@@ -0,0 +1,20 @@
package bundletest

import (
    "context"
    "testing"

    "github.com/databricks/cli/bundle"
    "github.com/databricks/cli/libs/diag"
    "github.com/databricks/cli/libs/dyn"
    "github.com/stretchr/testify/require"
)

func Mutate(t *testing.T, b *bundle.Bundle, f func(v dyn.Value) (dyn.Value, error)) {
    diags := bundle.ApplyFunc(context.Background(), b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
        err := b.Config.Mutate(f)
        require.NoError(t, err)
        return nil
    })
    require.NoError(t, diags.Error())
}
@@ -39,6 +39,10 @@ var levelsMap = map[string](map[string]string){
        CAN_VIEW: "CAN_VIEW",
        CAN_RUN:  "CAN_QUERY",
    },
+   "dashboards": {
+       CAN_MANAGE: "CAN_MANAGE",
+       CAN_VIEW:   "CAN_READ",
+   },
}

type bundlePermissions struct{}
@@ -152,6 +152,7 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
    bundle.Defer(
        bundle.Seq(
            terraform.StatePull(),
+           terraform.CheckDashboardsModifiedRemotely(),
            deploy.StatePull(),
            mutator.ValidateGitDetails(),
            artifacts.CleanUp(),
@@ -66,6 +66,7 @@ func Initialize() bundle.Mutator {
            permissions.PermissionDiagnostics(),
            mutator.SetRunAs(),
            mutator.OverrideCompute(),
+           mutator.ConfigureDashboardDefaults(),
            mutator.ProcessTargetMode(),
            mutator.ApplyPresets(),
            mutator.DefaultQueueing(),
@@ -180,6 +180,48 @@
         }
       ]
     },
+    "resources.Dashboard": {
+      "anyOf": [
+        {
+          "type": "object",
+          "properties": {
+            "display_name": {
+              "description": "The display name of the dashboard.",
+              "$ref": "#/$defs/string"
+            },
+            "embed_credentials": {
+              "$ref": "#/$defs/bool"
+            },
+            "file_path": {
+              "$ref": "#/$defs/string"
+            },
+            "parent_path": {
+              "description": "The workspace path of the folder containing the dashboard. Includes leading slash and no\ntrailing slash.\nThis field is excluded in List Dashboards responses.",
+              "$ref": "#/$defs/string"
+            },
+            "permissions": {
+              "$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
+            },
+            "serialized_dashboard": {
+              "description": "The contents of the dashboard in serialized string form.\nThis field is excluded in List Dashboards responses.\nUse the [get dashboard API](https://docs.databricks.com/api/workspace/lakeview/get)\nto retrieve an example response, which includes the `serialized_dashboard` field.\nThis field provides the structure of the JSON string that represents the dashboard's\nlayout and components.",
+              "$ref": "#/$defs/interface"
+            },
+            "warehouse_id": {
+              "description": "The warehouse ID used to run the dashboard.",
+              "$ref": "#/$defs/string"
+            }
+          },
+          "additionalProperties": false,
+          "required": [
+            "display_name"
+          ]
+        },
+        {
+          "type": "string",
+          "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
+        }
+      ]
+    },
     "resources.Grant": {
       "anyOf": [
         {
@@ -1054,6 +1096,9 @@
         "clusters": {
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster"
         },
+        "dashboards": {
+          "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Dashboard"
+        },
         "experiments": {
           "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment"
         },
@@ -5292,6 +5337,20 @@
           }
         ]
       },
+      "resources.Dashboard": {
+        "anyOf": [
+          {
+            "type": "object",
+            "additionalProperties": {
+              "$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Dashboard"
+            }
+          },
+          {
+            "type": "string",
+            "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
+          }
+        ]
+      },
       "resources.Job": {
         "anyOf": [
           {
@@ -0,0 +1,12 @@
+{
+    "properties": {
+        "unique_id": {
+            "type": "string",
+            "description": "Unique ID for job name"
+        },
+        "warehouse_id": {
+            "type": "string",
+            "description": "The SQL warehouse ID to use for the dashboard"
+        }
+    }
+}
@@ -0,0 +1,34 @@
+{
+  "pages": [
+    {
+      "displayName": "New Page",
+      "layout": [
+        {
+          "position": {
+            "height": 2,
+            "width": 6,
+            "x": 0,
+            "y": 0
+          },
+          "widget": {
+            "name": "82eb9107",
+            "textbox_spec": "# I'm a title"
+          }
+        },
+        {
+          "position": {
+            "height": 2,
+            "width": 6,
+            "x": 0,
+            "y": 2
+          },
+          "widget": {
+            "name": "ffa6de4f",
+            "textbox_spec": "Text"
+          }
+        }
+      ],
+      "name": "fdd21a3c"
+    }
+  ]
+}
@@ -0,0 +1,12 @@
+bundle:
+  name: dashboards
+
+workspace:
+  root_path: "~/.bundle/{{.unique_id}}"
+
+resources:
+  dashboards:
+    file_reference:
+      display_name: test-dashboard-{{.unique_id}}
+      file_path: ./dashboard.lvdash.json
+      warehouse_id: {{.warehouse_id}}
@@ -0,0 +1,63 @@
+package bundle
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/databricks-sdk-go/service/dashboards"
+	"github.com/databricks/databricks-sdk-go/service/workspace"
+	"github.com/google/uuid"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestAccDashboards(t *testing.T) {
+	ctx, wt := acc.WorkspaceTest(t)
+
+	warehouseID := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
+	uniqueID := uuid.New().String()
+	root, err := initTestTemplate(t, ctx, "dashboards", map[string]any{
+		"unique_id":    uniqueID,
+		"warehouse_id": warehouseID,
+	})
+	require.NoError(t, err)
+
+	t.Cleanup(func() {
+		err = destroyBundle(t, ctx, root)
+		require.NoError(t, err)
+	})
+
+	err = deployBundle(t, ctx, root)
+	require.NoError(t, err)
+
+	// Load bundle configuration by running the validate command.
+	b := unmarshalConfig(t, mustValidateBundle(t, ctx, root))
+
+	// Assert that the dashboard exists at the expected path and is, indeed, a dashboard.
+	oi, err := wt.W.Workspace.GetStatusByPath(ctx, fmt.Sprintf("%s/test-dashboard-%s.lvdash.json", b.Config.Workspace.ResourcePath, uniqueID))
+	require.NoError(t, err)
+	assert.EqualValues(t, workspace.ObjectTypeDashboard, oi.ObjectType)
+
+	// Load the dashboard by its ID and confirm its display name.
+	dashboard, err := wt.W.Lakeview.GetByDashboardId(ctx, oi.ResourceId)
+	require.NoError(t, err)
+	assert.Equal(t, fmt.Sprintf("test-dashboard-%s", uniqueID), dashboard.DisplayName)
+
+	// Make an out of band modification to the dashboard and confirm that it is detected.
+	_, err = wt.W.Lakeview.Update(ctx, dashboards.UpdateDashboardRequest{
+		DashboardId:         oi.ResourceId,
+		SerializedDashboard: dashboard.SerializedDashboard,
+	})
+	require.NoError(t, err)
+
+	// Try to redeploy the bundle and confirm that the out of band modification is detected.
+	stdout, _, err := deployBundleWithArgs(t, ctx, root)
+	require.Error(t, err)
+	assert.Contains(t, stdout, `Error: dashboard "file_reference" has been modified remotely`+"\n")
+
+	// Redeploy the bundle with the --force flag and confirm that the out of band modification is ignored.
+	_, stderr, err := deployBundleWithArgs(t, ctx, root, "--force")
+	require.NoError(t, err)
+	assert.Contains(t, stderr, `Deployment complete!`+"\n")
+}
@@ -11,6 +11,7 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/cmd/root"
 	"github.com/databricks/cli/internal"
 	"github.com/databricks/cli/libs/cmdio"
@@ -66,6 +67,19 @@ func validateBundle(t *testing.T, ctx context.Context, path string) ([]byte, err
 	return stdout.Bytes(), err
 }
 
+func mustValidateBundle(t *testing.T, ctx context.Context, path string) []byte {
+	data, err := validateBundle(t, ctx, path)
+	require.NoError(t, err)
+	return data
+}
+
+func unmarshalConfig(t *testing.T, data []byte) *bundle.Bundle {
+	bundle := &bundle.Bundle{}
+	err := json.Unmarshal(data, &bundle.Config)
+	require.NoError(t, err)
+	return bundle
+}
+
 func deployBundle(t *testing.T, ctx context.Context, path string) error {
 	ctx = env.Set(ctx, "BUNDLE_ROOT", path)
 	c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--auto-approve")
@@ -73,6 +87,14 @@ func deployBundle(t *testing.T, ctx context.Context, path string) error {
 	return err
 }
 
+func deployBundleWithArgs(t *testing.T, ctx context.Context, path string, args ...string) (string, string, error) {
+	ctx = env.Set(ctx, "BUNDLE_ROOT", path)
+	args = append([]string{"bundle", "deploy"}, args...)
+	c := internal.NewCobraTestRunnerWithContext(t, ctx, args...)
+	stdout, stderr, err := c.Run()
+	return stdout.String(), stderr.String(), err
+}
+
 func deployBundleWithFlags(t *testing.T, ctx context.Context, path string, flags []string) error {
 	ctx = env.Set(ctx, "BUNDLE_ROOT", path)
 	args := []string{"bundle", "deploy", "--force-lock"}