mirror of https://github.com/databricks/cli.git
Merge cfcb787fbc into abfd1713e0
This commit is contained in: commit 8279aee362

@@ -222,6 +222,8 @@ func (m *applyPresets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
dashboard.DisplayName = prefix + dashboard.DisplayName
}

// Apps doesn't support tags or prefixes yet.

if config.IsExplicitlyEnabled((b.Config.Presets.SourceLinkedDeployment)) {
isDatabricksWorkspace := dbr.RunsOnRuntime(ctx) && strings.HasPrefix(b.SyncRootPath, "/Workspace/")
if !isDatabricksWorkspace {

@@ -0,0 +1,45 @@
package mutator

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/merge"
)

type mergeApps struct{}

func MergeApps() bundle.Mutator {
	return &mergeApps{}
}

func (m *mergeApps) Name() string {
	return "MergeApps"
}

func (m *mergeApps) resourceName(v dyn.Value) string {
	switch v.Kind() {
	case dyn.KindInvalid, dyn.KindNil:
		return ""
	case dyn.KindString:
		return v.MustString()
	default:
		panic("app resource name must be a string")
	}
}

func (m *mergeApps) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		if v.Kind() == dyn.KindNil {
			return v, nil
		}

		return dyn.Map(v, "resources.apps", dyn.Foreach(func(_ dyn.Path, app dyn.Value) (dyn.Value, error) {
			return dyn.Map(app, "resources", merge.ElementsByKey("name", m.resourceName))
		}))
	})

	return diag.FromErr(err)
}

@@ -0,0 +1,64 @@
package mutator_test

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"github.com/stretchr/testify/assert"
)

func TestMergeApps(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"foo": {
						App: &apps.App{
							Name: "foo",
							Resources: []apps.AppResource{
								{
									Name: "job1",
									Job: &apps.AppResourceJob{
										Id: "1234",
										Permission: "CAN_MANAGE_RUN",
									},
								},
								{
									Name: "sql1",
									SqlWarehouse: &apps.AppResourceSqlWarehouse{
										Id: "5678",
										Permission: "CAN_USE",
									},
								},
								{
									Name: "job1",
									Job: &apps.AppResourceJob{
										Id: "1234",
										Permission: "CAN_MANAGE",
									},
								},
							},
						},
					},
				},
			},
		},
	}

	diags := bundle.Apply(context.Background(), b, mutator.MergeApps())
	assert.NoError(t, diags.Error())

	j := b.Config.Resources.Apps["foo"]

	assert.Len(t, j.Resources, 2)
	assert.Equal(t, "job1", j.Resources[0].Name)
	assert.Equal(t, "sql1", j.Resources[1].Name)

	assert.Equal(t, "CAN_MANAGE", string(j.Resources[0].Job.Permission))
	assert.Equal(t, "CAN_USE", string(j.Resources[1].SqlWarehouse.Permission))
}

@@ -262,6 +262,7 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
t.applyPipelineTranslations,
t.applyArtifactTranslations,
t.applyDashboardTranslations,
t.applyAppsTranslations,
} {
v, err = fn(v)
if err != nil {

@@ -0,0 +1,28 @@
package mutator

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func (t *translateContext) applyAppsTranslations(v dyn.Value) (dyn.Value, error) {
	// Convert the `source_code_path` field to a remote absolute path.
	// We use this path for app deployment to point to the source code.
	pattern := dyn.NewPattern(
		dyn.Key("resources"),
		dyn.Key("apps"),
		dyn.AnyKey(),
		dyn.Key("source_code_path"),
	)

	return dyn.MapByPattern(v, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
		key := p[2].Key()
		dir, err := v.Location().Directory()
		if err != nil {
			return dyn.InvalidValue, fmt.Errorf("unable to determine directory for app %s: %w", key, err)
		}

		return t.rewriteRelativeTo(p, v, t.translateDirectoryPath, dir, "")
	})
}

@@ -0,0 +1,54 @@
package mutator_test

import (
	"context"
	"path/filepath"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/internal/bundletest"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/vfs"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestTranslatePathsApps_FilePathRelativeSubDirectory(t *testing.T) {
	dir := t.TempDir()
	touchEmptyFile(t, filepath.Join(dir, "src", "my_app.lvdash.json"))

	b := &bundle.Bundle{
		SyncRootPath: dir,
		SyncRoot: vfs.MustNew(dir),
		Config: config.Root{
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"app": {
						App: &apps.App{
							Name: "My App",
						},
						SourceCodePath: "../src/",
					},
				},
			},
		},
	}

	bundletest.SetLocation(b, "resources.apps", []dyn.Location{{
		File: filepath.Join(dir, "resources/app.yml"),
	}})

	diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
	require.NoError(t, diags.Error())

	// Assert that the file path for the app has been converted to its local absolute path.
	assert.Equal(
		t,
		filepath.Join(dir, "src"),
		b.Config.Resources.Apps["app"].SourceCodePath,
	)
}

@@ -22,6 +22,7 @@ type Resources struct {
Schemas map[string]*resources.Schema `json:"schemas,omitempty"`
Clusters map[string]*resources.Cluster `json:"clusters,omitempty"`
Dashboards map[string]*resources.Dashboard `json:"dashboards,omitempty"`
Apps map[string]*resources.App `json:"apps,omitempty"`
}

type ConfigResource interface {

@@ -79,6 +80,7 @@ func (r *Resources) AllResources() []ResourceGroup {
collectResourceMap(descriptions["schemas"], r.Schemas),
collectResourceMap(descriptions["clusters"], r.Clusters),
collectResourceMap(descriptions["dashboards"], r.Dashboards),
collectResourceMap(descriptions["apps"], r.Apps),
}
}

@@ -183,5 +185,11 @@ func SupportedResources() map[string]ResourceDescription {
SingularTitle: "Dashboard",
PluralTitle: "Dashboards",
},
"apps": {
SingularName: "app",
PluralName: "apps",
SingularTitle: "App",
PluralTitle: "Apps",
},
}
}

@@ -0,0 +1,71 @@
package resources

import (
	"context"
	"fmt"
	"net/url"

	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/marshal"
	"github.com/databricks/databricks-sdk-go/service/apps"
)

type App struct {
	// This represents the id which is the name of the app that can be used
	// as a reference in other resources. This value is returned by terraform.
	ID string `json:"id,omitempty" bundle:"readonly"`

	// SourceCodePath is a required field used by DABs to point to the Databricks app source code
	// on local disk and to use it as the source code for the app deployment.
	SourceCodePath string `json:"source_code_path"`

	// Config is an optional field which allows configuring the app following the Databricks app configuration format, like in app.yml.
	// When this field is set, DABs read the configuration set in this field and write
	// it to app.yml in the root of the source code folder in the Databricks workspace.
	// If there is an app.yml defined already, it will be overridden.
	Config map[string]interface{} `json:"config,omitempty"`

	Permissions []Permission `json:"permissions,omitempty"`
	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
	URL string `json:"url,omitempty" bundle:"internal"`

	*apps.App
}

func (a *App) UnmarshalJSON(b []byte) error {
	return marshal.Unmarshal(b, a)
}

func (a App) MarshalJSON() ([]byte, error) {
	return marshal.Marshal(a)
}

func (a *App) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) {
	_, err := w.Apps.GetByName(ctx, name)
	if err != nil {
		log.Debugf(ctx, "app %s does not exist", name)
		return false, err
	}
	return true, nil
}

func (a *App) TerraformResourceName() string {
	return "databricks_app"
}

func (a *App) InitializeURL(baseURL url.URL) {
	if a.ID == "" {
		return
	}
	baseURL.Path = fmt.Sprintf("apps/%s", a.ID)
	a.URL = baseURL.String()
}

func (a *App) GetName() string {
	return a.Name
}

func (a *App) GetURL() string {
	return a.URL
}

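A minimal sketch (not part of this commit) of how a `config` map like the one documented on the App resource above ends up as the app.yml payload. It assumes the same gopkg.in/yaml.v3 encoder settings that configToYaml in this commit uses (SetIndent(2)); the map contents are illustrative only.

package main

import (
	"bytes"
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// Hypothetical app config, mirroring the `config` field of the App resource above.
	config := map[string]interface{}{
		"command": []string{"echo", "hello"},
		"env": []map[string]string{
			{"name": "MY_APP", "value": "my value"},
		},
	}

	// Same encoder settings as configToYaml below: 2-space indent.
	buf := bytes.NewBuffer(nil)
	enc := yaml.NewEncoder(buf)
	enc.SetIndent(2)
	if err := enc.Encode(config); err != nil {
		panic(err)
	}
	enc.Close()

	// This is the content that would be written to app.yml in the workspace.
	fmt.Print(buf.String())
}
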
@@ -0,0 +1,106 @@
package apps

import (
	"bytes"
	"context"
	"fmt"
	"path"
	"path/filepath"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/deploy"
	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/filer"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"golang.org/x/sync/errgroup"

	"gopkg.in/yaml.v3"
)

type appsDeploy struct {
	filerFactory deploy.FilerFactory
}

func Deploy() bundle.Mutator {
	return appsDeploy{deploy.AppFiler}
}

func (a appsDeploy) Name() string {
	return "apps.Deploy"
}

func (a appsDeploy) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	if len(b.Config.Resources.Apps) == 0 {
		return nil
	}

	errGrp, ctx := errgroup.WithContext(ctx)
	w := b.WorkspaceClient()
	f, err := a.filerFactory(b)
	if err != nil {
		return diag.FromErr(err)
	}

	for _, app := range b.Config.Resources.Apps {
		cmdio.LogString(ctx, fmt.Sprintf("Deploying app %s...", app.Name))
		errGrp.Go(func() error {
			// If the app has a config, we need to deploy it first.
			// This means we need to write an app.yml file with the content of the config field
			// to the remote source code path of the app.
			if app.Config != nil {
				appPath, err := filepath.Rel(b.Config.Workspace.FilePath, app.SourceCodePath)
				if err != nil {
					return fmt.Errorf("failed to get relative path of app source code path: %w", err)
				}

				buf, err := configToYaml(app)
				if err != nil {
					return err
				}

				err = f.Write(ctx, path.Join(appPath, "app.yml"), buf, filer.OverwriteIfExists)
				if err != nil {
					return fmt.Errorf("failed to write %s file: %w", path.Join(app.SourceCodePath, "app.yml"), err)
				}
			}

			wait, err := w.Apps.Deploy(ctx, apps.CreateAppDeploymentRequest{
				AppName: app.Name,
				AppDeployment: &apps.AppDeployment{
					Mode: apps.AppDeploymentModeSnapshot,
					SourceCodePath: app.SourceCodePath,
				},
			})

			if err != nil {
				return err
			}

			_, err = wait.Get()
			return err
		})
	}

	if err := errGrp.Wait(); err != nil {
		return diag.FromErr(err)
	}

	return nil
}

func configToYaml(app *resources.App) (*bytes.Buffer, error) {
	buf := bytes.NewBuffer(nil)
	enc := yaml.NewEncoder(buf)
	enc.SetIndent(2)

	err := enc.Encode(app.Config)
	defer enc.Close()

	if err != nil {
		return nil, fmt.Errorf("failed to encode app config to yaml: %w", err)
	}

	return buf, nil
}

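A small sketch (not part of this commit) of the path arithmetic in Apply above: the filer returned by deploy.AppFiler is rooted at Workspace.FilePath, so the app.yml write target is the already-translated remote source code path made relative to that root. The literal paths below are illustrative and mirror the test that follows.

package main

import (
	"fmt"
	"path"
	"path/filepath"
)

func main() {
	// Workspace.FilePath is where the AppFiler is rooted; SourceCodePath has
	// already been rewritten to a remote absolute path by TranslatePaths.
	workspaceFilePath := "/Workspace/Users/foo@bar.com/files"
	sourceCodePath := "/Workspace/Users/foo@bar.com/files/app1"

	// Same computation as in Apply: the app.yml target is relative to the filer root.
	appPath, err := filepath.Rel(workspaceFilePath, sourceCodePath)
	if err != nil {
		panic(err)
	}
	fmt.Println(path.Join(appPath, "app.yml")) // app1/app.yml
}
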
@@ -0,0 +1,113 @@
package apps

import (
	"bytes"
	"context"
	"os"
	"path/filepath"
	"testing"
	"time"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/internal/bundletest"
	mockfiler "github.com/databricks/cli/internal/mocks/libs/filer"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/filer"
	"github.com/databricks/cli/libs/vfs"
	"github.com/databricks/databricks-sdk-go/experimental/mocks"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

func TestAppDeploy(t *testing.T) {
	root := t.TempDir()
	err := os.MkdirAll(filepath.Join(root, "app1"), 0700)
	require.NoError(t, err)

	err = os.MkdirAll(filepath.Join(root, "app2"), 0700)
	require.NoError(t, err)

	b := &bundle.Bundle{
		BundleRootPath: root,
		SyncRoot: vfs.MustNew(root),
		Config: config.Root{
			Workspace: config.Workspace{
				RootPath: "/Workspace/Users/foo@bar.com/",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"app1": {
						App: &apps.App{
							Name: "app1",
						},
						SourceCodePath: "./app1",
						Config: map[string]interface{}{
							"command": []string{"echo", "hello"},
							"env": []map[string]string{
								{"name": "MY_APP", "value": "my value"},
							},
						},
					},
					"app2": {
						App: &apps.App{
							Name: "app2",
						},
						SourceCodePath: "./app2",
					},
				},
			},
		},
	}

	mwc := mocks.NewMockWorkspaceClient(t)
	b.SetWorkpaceClient(mwc.WorkspaceClient)

	wait := &apps.WaitGetDeploymentAppSucceeded[apps.AppDeployment]{
		Poll: func(_ time.Duration, _ func(*apps.AppDeployment)) (*apps.AppDeployment, error) {
			return nil, nil
		},
	}
	appApi := mwc.GetMockAppsAPI()
	appApi.EXPECT().Deploy(mock.Anything, apps.CreateAppDeploymentRequest{
		AppName: "app1",
		AppDeployment: &apps.AppDeployment{
			Mode: apps.AppDeploymentModeSnapshot,
			SourceCodePath: "/Workspace/Users/foo@bar.com/files/app1",
		},
	}).Return(wait, nil)

	appApi.EXPECT().Deploy(mock.Anything, apps.CreateAppDeploymentRequest{
		AppName: "app2",
		AppDeployment: &apps.AppDeployment{
			Mode: apps.AppDeploymentModeSnapshot,
			SourceCodePath: "/Workspace/Users/foo@bar.com/files/app2",
		},
	}).Return(wait, nil)

	mockFiler := mockfiler.NewMockFiler(t)
	mockFiler.EXPECT().Write(mock.Anything, "app1/app.yml", bytes.NewBufferString(`command:
  - echo
  - hello
env:
  - name: MY_APP
    value: my value
`), filer.OverwriteIfExists).Return(nil)

	bundletest.SetLocation(b, "resources.apps.app1", []dyn.Location{{File: "./databricks.yml"}})
	bundletest.SetLocation(b, "resources.apps.app2", []dyn.Location{{File: "./databricks.yml"}})

	ctx := context.Background()
	diags := bundle.Apply(ctx, b, bundle.Seq(
		mutator.DefineDefaultWorkspacePaths(),
		mutator.TranslatePaths(),
		appsDeploy{
			func(b *bundle.Bundle) (filer.Filer, error) {
				return mockFiler, nil
			},
		}))
	require.Empty(t, diags)
}

@@ -12,3 +12,8 @@ type FilerFactory func(b *bundle.Bundle) (filer.Filer, error)
func StateFiler(b *bundle.Bundle) (filer.Filer, error) {
return filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.StatePath)
}

// AppFiler returns a filer.Filer that can be used to read/write Databricks apps related files.
func AppFiler(b *bundle.Bundle) (filer.Filer, error) {
return filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.FilePath)
}

@@ -186,6 +186,16 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
}
cur.ID = instance.Attributes.ID
config.Resources.Dashboards[resource.Name] = cur
case "databricks_app":
if config.Resources.Apps == nil {
config.Resources.Apps = make(map[string]*resources.App)
}
cur := config.Resources.Apps[resource.Name]
if cur == nil {
cur = &resources.App{ModifiedStatus: resources.ModifiedStatusDeleted}
}
cur.ID = instance.Attributes.ID
config.Resources.Apps[resource.Name] = cur
case "databricks_permissions":
case "databricks_grants":
// Ignore; no need to pull these back into the configuration.

@@ -245,6 +255,11 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
src.ModifiedStatus = resources.ModifiedStatusCreated
}
}
for _, src := range config.Resources.Apps {
if src.ModifiedStatus == "" && src.ID == "" {
src.ModifiedStatus = resources.ModifiedStatusCreated
}
}

return nil
}

@@ -10,6 +10,7 @@ import (
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/dashboards"

@@ -686,6 +687,14 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
{Attributes: stateInstanceAttributes{ID: "1"}},
},
},
{
Type: "databricks_app",
Mode: "managed",
Name: "test_app",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{ID: "1"}},
},
},
},
}
err := TerraformToBundle(&tfState, &config)

@@ -721,6 +730,9 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
assert.Equal(t, "1", config.Resources.Dashboards["test_dashboard"].ID)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)

assert.Equal(t, "1", config.Resources.Apps["test_app"].ID)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Apps["test_app"].ModifiedStatus)

AssertFullResourceCoverage(t, &config)
}

@@ -797,6 +809,13 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
},
},
},
Apps: map[string]*resources.App{
"test_app": {
App: &apps.App{
Name: "test_app",
},
},
},
},
}
var tfState = resourcesState{

@@ -835,6 +854,9 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
assert.Equal(t, "", config.Resources.Dashboards["test_dashboard"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)

assert.Equal(t, "", config.Resources.Apps["test_app"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Apps["test_app"].ModifiedStatus)

AssertFullResourceCoverage(t, &config)
}

@@ -961,6 +983,18 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
},
},
},
Apps: map[string]*resources.App{
"test_app": {
App: &apps.App{
Name: "test_app",
},
},
"test_app_new": {
App: &apps.App{
Name: "test_app_new",
},
},
},
},
}
var tfState = resourcesState{

@@ -1125,6 +1159,22 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
{Attributes: stateInstanceAttributes{ID: "2"}},
},
},
{
Type: "databricks_app",
Mode: "managed",
Name: "test_app",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{ID: "1"}},
},
},
{
Type: "databricks_app",
Mode: "managed",
Name: "test_app_old",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{ID: "2"}},
},
},
},
}
err := TerraformToBundle(&tfState, &config)

@@ -1200,6 +1250,13 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
assert.Equal(t, "", config.Resources.Dashboards["test_dashboard_new"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard_new"].ModifiedStatus)

assert.Equal(t, "1", config.Resources.Apps["test_app"].ID)
assert.Equal(t, "", config.Resources.Apps["test_app"].ModifiedStatus)
assert.Equal(t, "2", config.Resources.Apps["test_app_old"].ID)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Apps["test_app_old"].ModifiedStatus)
assert.Equal(t, "", config.Resources.Apps["test_app_new"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Apps["test_app_new"].ModifiedStatus)

AssertFullResourceCoverage(t, &config)
}

@@ -62,6 +62,8 @@ func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.D
path = dyn.NewPath(dyn.Key("databricks_cluster")).Append(path[2:]...)
case dyn.Key("dashboards"):
path = dyn.NewPath(dyn.Key("databricks_dashboard")).Append(path[2:]...)
case dyn.Key("apps"):
path = dyn.NewPath(dyn.Key("databricks_app")).Append(path[2:]...)
default:
// Trigger "key not found" for unknown resource types.
return dyn.GetByPath(root, path)

@@ -33,6 +33,7 @@ func TestInterpolate(t *testing.T) {
"other_schema": "${resources.schemas.other_schema.id}",
"other_cluster": "${resources.clusters.other_cluster.id}",
"other_dashboard": "${resources.dashboards.other_dashboard.id}",
"other_app": "${resources.apps.other_app.id}",
},
Tasks: []jobs.Task{
{

@@ -71,6 +72,7 @@ func TestInterpolate(t *testing.T) {
assert.Equal(t, "${databricks_schema.other_schema.id}", j.Tags["other_schema"])
assert.Equal(t, "${databricks_cluster.other_cluster.id}", j.Tags["other_cluster"])
assert.Equal(t, "${databricks_dashboard.other_dashboard.id}", j.Tags["other_dashboard"])
assert.Equal(t, "${databricks_app.other_app.id}", j.Tags["other_app"])

m := b.Config.Resources.Models["my_model"]
assert.Equal(t, "my_model", m.Model.Name)

@@ -0,0 +1,57 @@
package tfdyn

import (
	"context"

	"github.com/databricks/cli/bundle/internal/tf/schema"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go/service/apps"
)

func convertAppResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	// Normalize the output value to the target schema.
	vout, diags := convert.Normalize(apps.App{}, vin)
	for _, diag := range diags {
		log.Debugf(ctx, "app normalization diagnostic: %s", diag.Summary)
	}

	return vout, nil
}

type appConverter struct{}

func (appConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	vout, err := convertAppResource(ctx, vin)
	if err != nil {
		return err
	}

	// Modify top-level keys.
	vout, err = renameKeys(vout, map[string]string{
		"resources": "resource",
	})

	if err != nil {
		return err
	}

	// Add the converted resource to the output.
	out.App[key] = vout.AsAny()

	// Configure permissions for this resource.
	if permissions := convertPermissionsResource(ctx, vin); permissions != nil {
		// TODO: add when permissions are supported in TF
		/*
			permissions.AppId = fmt.Sprintf("${databricks_app.%s.id}", key)
			out.Permissions["app_"+key] = permissions
		*/
	}

	return nil
}

func init() {
	registerConverter("apps", appConverter{})
}

@@ -0,0 +1,102 @@
package tfdyn

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/internal/tf/schema"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestConvertApp(t *testing.T) {
	var src = resources.App{
		SourceCodePath: "./app",
		Config: map[string]interface{}{
			"command": []string{"python", "app.py"},
		},
		App: &apps.App{
			Name: "app_id",
			Description: "app description",
			Resources: []apps.AppResource{
				{
					Name: "job1",
					Job: &apps.AppResourceJob{
						Id: "1234",
						Permission: "CAN_MANAGE_RUN",
					},
				},
				{
					Name: "sql1",
					SqlWarehouse: &apps.AppResourceSqlWarehouse{
						Id: "5678",
						Permission: "CAN_USE",
					},
				},
			},
		},
		Permissions: []resources.Permission{
			{
				Level: "CAN_RUN",
				UserName: "jack@gmail.com",
			},
			{
				Level: "CAN_MANAGE",
				ServicePrincipalName: "sp",
			},
		},
	}

	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = appConverter{}.Convert(ctx, "my_app", vin, out)
	require.NoError(t, err)

	app := out.App["my_app"]
	assert.Equal(t, map[string]interface{}{
		"description": "app description",
		"name": "app_id",
		"resource": []interface{}{
			map[string]interface{}{
				"name": "job1",
				"job": map[string]interface{}{
					"id": "1234",
					"permission": "CAN_MANAGE_RUN",
				},
			},
			map[string]interface{}{
				"name": "sql1",
				"sql_warehouse": map[string]interface{}{
					"id": "5678",
					"permission": "CAN_USE",
				},
			},
		},
	}, app)

	// TODO: Add when permissions are supported in TF
	/*
		// Assert equality on the permissions
		assert.Equal(t, &schema.ResourcePermissions{
			AppId: "${databricks_app.my_app.id}",
			AccessControl: []schema.ResourcePermissionsAccessControl{
				{
					PermissionLevel: "CAN_RUN",
					UserName: "jack@gmail.com",
				},
				{
					PermissionLevel: "CAN_MANAGE",
					ServicePrincipalName: "sp",
				},
			},
		}, out.Permissions["app_my_app"])
	*/

}

@@ -3,6 +3,7 @@
package schema

type DataSourceAwsAssumeRolePolicy struct {
AwsPartition string `json:"aws_partition,omitempty"`
DatabricksAccountId string `json:"databricks_account_id,omitempty"`
ExternalId string `json:"external_id"`
ForLogDelivery bool `json:"for_log_delivery,omitempty"`

@@ -3,6 +3,7 @@
package schema

type DataSourceAwsBucketPolicy struct {
AwsPartition string `json:"aws_partition,omitempty"`
Bucket string `json:"bucket"`
DatabricksAccountId string `json:"databricks_account_id,omitempty"`
DatabricksE2AccountId string `json:"databricks_e2_account_id,omitempty"`

@@ -4,6 +4,7 @@ package schema

type DataSourceAwsCrossaccountPolicy struct {
AwsAccountId string `json:"aws_account_id,omitempty"`
AwsPartition string `json:"aws_partition,omitempty"`
Id string `json:"id,omitempty"`
Json string `json:"json,omitempty"`
PassRoles []string `json:"pass_roles,omitempty"`

@@ -4,6 +4,7 @@ package schema

type DataSourceAwsUnityCatalogAssumeRolePolicy struct {
AwsAccountId string `json:"aws_account_id"`
AwsPartition string `json:"aws_partition,omitempty"`
ExternalId string `json:"external_id"`
Id string `json:"id,omitempty"`
Json string `json:"json,omitempty"`

@@ -4,6 +4,7 @@ package schema

type DataSourceAwsUnityCatalogPolicy struct {
AwsAccountId string `json:"aws_account_id"`
AwsPartition string `json:"aws_partition,omitempty"`
BucketName string `json:"bucket_name"`
Id string `json:"id,omitempty"`
Json string `json:"json,omitempty"`

@@ -0,0 +1,51 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.

package schema

type DataSourceMwsNetworkConnectivityConfigEgressConfigDefaultRulesAwsStableIpRule struct {
	CidrBlocks []string `json:"cidr_blocks,omitempty"`
}

type DataSourceMwsNetworkConnectivityConfigEgressConfigDefaultRulesAzureServiceEndpointRule struct {
	Subnets []string `json:"subnets,omitempty"`
	TargetRegion string `json:"target_region,omitempty"`
	TargetServices []string `json:"target_services,omitempty"`
}

type DataSourceMwsNetworkConnectivityConfigEgressConfigDefaultRules struct {
	AwsStableIpRule *DataSourceMwsNetworkConnectivityConfigEgressConfigDefaultRulesAwsStableIpRule `json:"aws_stable_ip_rule,omitempty"`
	AzureServiceEndpointRule *DataSourceMwsNetworkConnectivityConfigEgressConfigDefaultRulesAzureServiceEndpointRule `json:"azure_service_endpoint_rule,omitempty"`
}

type DataSourceMwsNetworkConnectivityConfigEgressConfigTargetRulesAzurePrivateEndpointRules struct {
	ConnectionState string `json:"connection_state,omitempty"`
	CreationTime int `json:"creation_time,omitempty"`
	Deactivated bool `json:"deactivated,omitempty"`
	DeactivatedAt int `json:"deactivated_at,omitempty"`
	EndpointName string `json:"endpoint_name,omitempty"`
	GroupId string `json:"group_id,omitempty"`
	NetworkConnectivityConfigId string `json:"network_connectivity_config_id,omitempty"`
	ResourceId string `json:"resource_id,omitempty"`
	RuleId string `json:"rule_id,omitempty"`
	UpdatedTime int `json:"updated_time,omitempty"`
}

type DataSourceMwsNetworkConnectivityConfigEgressConfigTargetRules struct {
	AzurePrivateEndpointRules []DataSourceMwsNetworkConnectivityConfigEgressConfigTargetRulesAzurePrivateEndpointRules `json:"azure_private_endpoint_rules,omitempty"`
}

type DataSourceMwsNetworkConnectivityConfigEgressConfig struct {
	DefaultRules *DataSourceMwsNetworkConnectivityConfigEgressConfigDefaultRules `json:"default_rules,omitempty"`
	TargetRules *DataSourceMwsNetworkConnectivityConfigEgressConfigTargetRules `json:"target_rules,omitempty"`
}

type DataSourceMwsNetworkConnectivityConfig struct {
	AccountId string `json:"account_id,omitempty"`
	CreationTime int `json:"creation_time,omitempty"`
	Id string `json:"id,omitempty"`
	Name string `json:"name"`
	NetworkConnectivityConfigId string `json:"network_connectivity_config_id,omitempty"`
	Region string `json:"region,omitempty"`
	UpdatedTime int `json:"updated_time,omitempty"`
	EgressConfig *DataSourceMwsNetworkConnectivityConfigEgressConfig `json:"egress_config,omitempty"`
}

@@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.

package schema

type DataSourceMwsNetworkConnectivityConfigs struct {
	Id string `json:"id,omitempty"`
	Names []string `json:"names,omitempty"`
	Region string `json:"region,omitempty"`
}

@@ -0,0 +1,52 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.

package schema

type DataSourceRegisteredModelVersionsModelVersionsAliases struct {
	AliasName string `json:"alias_name,omitempty"`
	VersionNum int `json:"version_num,omitempty"`
}

type DataSourceRegisteredModelVersionsModelVersionsModelVersionDependenciesDependenciesFunction struct {
	FunctionFullName string `json:"function_full_name"`
}

type DataSourceRegisteredModelVersionsModelVersionsModelVersionDependenciesDependenciesTable struct {
	TableFullName string `json:"table_full_name"`
}

type DataSourceRegisteredModelVersionsModelVersionsModelVersionDependenciesDependencies struct {
	Function []DataSourceRegisteredModelVersionsModelVersionsModelVersionDependenciesDependenciesFunction `json:"function,omitempty"`
	Table []DataSourceRegisteredModelVersionsModelVersionsModelVersionDependenciesDependenciesTable `json:"table,omitempty"`
}

type DataSourceRegisteredModelVersionsModelVersionsModelVersionDependencies struct {
	Dependencies []DataSourceRegisteredModelVersionsModelVersionsModelVersionDependenciesDependencies `json:"dependencies,omitempty"`
}

type DataSourceRegisteredModelVersionsModelVersions struct {
	BrowseOnly bool `json:"browse_only,omitempty"`
	CatalogName string `json:"catalog_name,omitempty"`
	Comment string `json:"comment,omitempty"`
	CreatedAt int `json:"created_at,omitempty"`
	CreatedBy string `json:"created_by,omitempty"`
	Id string `json:"id,omitempty"`
	MetastoreId string `json:"metastore_id,omitempty"`
	ModelName string `json:"model_name,omitempty"`
	RunId string `json:"run_id,omitempty"`
	RunWorkspaceId int `json:"run_workspace_id,omitempty"`
	SchemaName string `json:"schema_name,omitempty"`
	Source string `json:"source,omitempty"`
	Status string `json:"status,omitempty"`
	StorageLocation string `json:"storage_location,omitempty"`
	UpdatedAt int `json:"updated_at,omitempty"`
	UpdatedBy string `json:"updated_by,omitempty"`
	Version int `json:"version,omitempty"`
	Aliases []DataSourceRegisteredModelVersionsModelVersionsAliases `json:"aliases,omitempty"`
	ModelVersionDependencies []DataSourceRegisteredModelVersionsModelVersionsModelVersionDependencies `json:"model_version_dependencies,omitempty"`
}

type DataSourceRegisteredModelVersions struct {
	FullName string `json:"full_name"`
	ModelVersions []DataSourceRegisteredModelVersionsModelVersions `json:"model_versions,omitempty"`
}

@@ -0,0 +1,178 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.

package schema

type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii struct {
	Behavior string `json:"behavior"`
}

type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInput struct {
	InvalidKeywords []string `json:"invalid_keywords,omitempty"`
	Safety bool `json:"safety,omitempty"`
	ValidTopics []string `json:"valid_topics,omitempty"`
	Pii []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii `json:"pii,omitempty"`
}

type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii struct {
	Behavior string `json:"behavior"`
}

type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutput struct {
	InvalidKeywords []string `json:"invalid_keywords,omitempty"`
	Safety bool `json:"safety,omitempty"`
	ValidTopics []string `json:"valid_topics,omitempty"`
	Pii []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii `json:"pii,omitempty"`
}

type DataSourceServingEndpointsEndpointsAiGatewayGuardrails struct {
	Input []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInput `json:"input,omitempty"`
	Output []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutput `json:"output,omitempty"`
}

type DataSourceServingEndpointsEndpointsAiGatewayInferenceTableConfig struct {
	CatalogName string `json:"catalog_name,omitempty"`
	Enabled bool `json:"enabled,omitempty"`
	SchemaName string `json:"schema_name,omitempty"`
	TableNamePrefix string `json:"table_name_prefix,omitempty"`
}

type DataSourceServingEndpointsEndpointsAiGatewayRateLimits struct {
	Calls int `json:"calls"`
	Key string `json:"key,omitempty"`
	RenewalPeriod string `json:"renewal_period"`
}

type DataSourceServingEndpointsEndpointsAiGatewayUsageTrackingConfig struct {
	Enabled bool `json:"enabled,omitempty"`
}

type DataSourceServingEndpointsEndpointsAiGateway struct {
	Guardrails []DataSourceServingEndpointsEndpointsAiGatewayGuardrails `json:"guardrails,omitempty"`
	InferenceTableConfig []DataSourceServingEndpointsEndpointsAiGatewayInferenceTableConfig `json:"inference_table_config,omitempty"`
	RateLimits []DataSourceServingEndpointsEndpointsAiGatewayRateLimits `json:"rate_limits,omitempty"`
	UsageTrackingConfig []DataSourceServingEndpointsEndpointsAiGatewayUsageTrackingConfig `json:"usage_tracking_config,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAi21LabsConfig struct {
	Ai21LabsApiKey string `json:"ai21labs_api_key,omitempty"`
	Ai21LabsApiKeyPlaintext string `json:"ai21labs_api_key_plaintext,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAmazonBedrockConfig struct {
	AwsAccessKeyId string `json:"aws_access_key_id,omitempty"`
	AwsAccessKeyIdPlaintext string `json:"aws_access_key_id_plaintext,omitempty"`
	AwsRegion string `json:"aws_region"`
	AwsSecretAccessKey string `json:"aws_secret_access_key,omitempty"`
	AwsSecretAccessKeyPlaintext string `json:"aws_secret_access_key_plaintext,omitempty"`
	BedrockProvider string `json:"bedrock_provider"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAnthropicConfig struct {
	AnthropicApiKey string `json:"anthropic_api_key,omitempty"`
	AnthropicApiKeyPlaintext string `json:"anthropic_api_key_plaintext,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelCohereConfig struct {
	CohereApiBase string `json:"cohere_api_base,omitempty"`
	CohereApiKey string `json:"cohere_api_key,omitempty"`
	CohereApiKeyPlaintext string `json:"cohere_api_key_plaintext,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelDatabricksModelServingConfig struct {
	DatabricksApiToken string `json:"databricks_api_token,omitempty"`
	DatabricksApiTokenPlaintext string `json:"databricks_api_token_plaintext,omitempty"`
	DatabricksWorkspaceUrl string `json:"databricks_workspace_url"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelGoogleCloudVertexAiConfig struct {
	PrivateKey string `json:"private_key,omitempty"`
	PrivateKeyPlaintext string `json:"private_key_plaintext,omitempty"`
	ProjectId string `json:"project_id,omitempty"`
	Region string `json:"region,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelOpenaiConfig struct {
	MicrosoftEntraClientId string `json:"microsoft_entra_client_id,omitempty"`
	MicrosoftEntraClientSecret string `json:"microsoft_entra_client_secret,omitempty"`
	MicrosoftEntraClientSecretPlaintext string `json:"microsoft_entra_client_secret_plaintext,omitempty"`
	MicrosoftEntraTenantId string `json:"microsoft_entra_tenant_id,omitempty"`
	OpenaiApiBase string `json:"openai_api_base,omitempty"`
	OpenaiApiKey string `json:"openai_api_key,omitempty"`
	OpenaiApiKeyPlaintext string `json:"openai_api_key_plaintext,omitempty"`
	OpenaiApiType string `json:"openai_api_type,omitempty"`
	OpenaiApiVersion string `json:"openai_api_version,omitempty"`
	OpenaiDeploymentName string `json:"openai_deployment_name,omitempty"`
	OpenaiOrganization string `json:"openai_organization,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelPalmConfig struct {
	PalmApiKey string `json:"palm_api_key,omitempty"`
	PalmApiKeyPlaintext string `json:"palm_api_key_plaintext,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModel struct {
	Name string `json:"name"`
	Provider string `json:"provider"`
	Task string `json:"task"`
	Ai21LabsConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAi21LabsConfig `json:"ai21labs_config,omitempty"`
	AmazonBedrockConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAmazonBedrockConfig `json:"amazon_bedrock_config,omitempty"`
	AnthropicConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAnthropicConfig `json:"anthropic_config,omitempty"`
	CohereConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelCohereConfig `json:"cohere_config,omitempty"`
	DatabricksModelServingConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelDatabricksModelServingConfig `json:"databricks_model_serving_config,omitempty"`
	GoogleCloudVertexAiConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelGoogleCloudVertexAiConfig `json:"google_cloud_vertex_ai_config,omitempty"`
	OpenaiConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelOpenaiConfig `json:"openai_config,omitempty"`
	PalmConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelPalmConfig `json:"palm_config,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntitiesFoundationModel struct {
	Description string `json:"description,omitempty"`
	DisplayName string `json:"display_name,omitempty"`
	Docs string `json:"docs,omitempty"`
	Name string `json:"name,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedEntities struct {
	EntityName string `json:"entity_name,omitempty"`
	EntityVersion string `json:"entity_version,omitempty"`
	Name string `json:"name,omitempty"`
	ExternalModel []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModel `json:"external_model,omitempty"`
	FoundationModel []DataSourceServingEndpointsEndpointsConfigServedEntitiesFoundationModel `json:"foundation_model,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfigServedModels struct {
	ModelName string `json:"model_name,omitempty"`
	ModelVersion string `json:"model_version,omitempty"`
	Name string `json:"name,omitempty"`
}

type DataSourceServingEndpointsEndpointsConfig struct {
	ServedEntities []DataSourceServingEndpointsEndpointsConfigServedEntities `json:"served_entities,omitempty"`
	ServedModels []DataSourceServingEndpointsEndpointsConfigServedModels `json:"served_models,omitempty"`
}

type DataSourceServingEndpointsEndpointsState struct {
	ConfigUpdate string `json:"config_update,omitempty"`
	Ready string `json:"ready,omitempty"`
}

type DataSourceServingEndpointsEndpointsTags struct {
	Key string `json:"key"`
	Value string `json:"value,omitempty"`
}

type DataSourceServingEndpointsEndpoints struct {
	CreationTimestamp int `json:"creation_timestamp,omitempty"`
	Creator string `json:"creator,omitempty"`
	Id string `json:"id,omitempty"`
	LastUpdatedTimestamp int `json:"last_updated_timestamp,omitempty"`
	Name string `json:"name,omitempty"`
	Task string `json:"task,omitempty"`
	AiGateway []DataSourceServingEndpointsEndpointsAiGateway `json:"ai_gateway,omitempty"`
	Config []DataSourceServingEndpointsEndpointsConfig `json:"config,omitempty"`
	State []DataSourceServingEndpointsEndpointsState `json:"state,omitempty"`
	Tags []DataSourceServingEndpointsEndpointsTags `json:"tags,omitempty"`
}

type DataSourceServingEndpoints struct {
	Endpoints []DataSourceServingEndpointsEndpoints `json:"endpoints,omitempty"`
}

@@ -33,6 +33,8 @@ type DataSources struct {
MlflowModel map[string]any `json:"databricks_mlflow_model,omitempty"`
MlflowModels map[string]any `json:"databricks_mlflow_models,omitempty"`
MwsCredentials map[string]any `json:"databricks_mws_credentials,omitempty"`
MwsNetworkConnectivityConfig map[string]any `json:"databricks_mws_network_connectivity_config,omitempty"`
MwsNetworkConnectivityConfigs map[string]any `json:"databricks_mws_network_connectivity_configs,omitempty"`
MwsWorkspaces map[string]any `json:"databricks_mws_workspaces,omitempty"`
NodeType map[string]any `json:"databricks_node_type,omitempty"`
Notebook map[string]any `json:"databricks_notebook,omitempty"`

@@ -40,10 +42,12 @@ type DataSources struct {
NotificationDestinations map[string]any `json:"databricks_notification_destinations,omitempty"`
Pipelines map[string]any `json:"databricks_pipelines,omitempty"`
RegisteredModel map[string]any `json:"databricks_registered_model,omitempty"`
RegisteredModelVersions map[string]any `json:"databricks_registered_model_versions,omitempty"`
Schema map[string]any `json:"databricks_schema,omitempty"`
Schemas map[string]any `json:"databricks_schemas,omitempty"`
ServicePrincipal map[string]any `json:"databricks_service_principal,omitempty"`
ServicePrincipals map[string]any `json:"databricks_service_principals,omitempty"`
ServingEndpoints map[string]any `json:"databricks_serving_endpoints,omitempty"`
Share map[string]any `json:"databricks_share,omitempty"`
Shares map[string]any `json:"databricks_shares,omitempty"`
SparkVersion map[string]any `json:"databricks_spark_version,omitempty"`

@@ -92,6 +96,8 @@ func NewDataSources() *DataSources {
MlflowModel: make(map[string]any),
MlflowModels: make(map[string]any),
MwsCredentials: make(map[string]any),
MwsNetworkConnectivityConfig: make(map[string]any),
MwsNetworkConnectivityConfigs: make(map[string]any),
MwsWorkspaces: make(map[string]any),
NodeType: make(map[string]any),
Notebook: make(map[string]any),

@@ -99,10 +105,12 @@ func NewDataSources() *DataSources {
NotificationDestinations: make(map[string]any),
Pipelines: make(map[string]any),
RegisteredModel: make(map[string]any),
RegisteredModelVersions: make(map[string]any),
Schema: make(map[string]any),
Schemas: make(map[string]any),
ServicePrincipal: make(map[string]any),
ServicePrincipals: make(map[string]any),
ServingEndpoints: make(map[string]any),
Share: make(map[string]any),
Shares: make(map[string]any),
SparkVersion: make(map[string]any),

@@ -0,0 +1,102 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.

package schema

type ResourceAppActiveDeploymentDeploymentArtifacts struct {
	SourceCodePath string `json:"source_code_path,omitempty"`
}

type ResourceAppActiveDeploymentStatus struct {
	Message string `json:"message,omitempty"`
	State string `json:"state,omitempty"`
}

type ResourceAppActiveDeployment struct {
	CreateTime string `json:"create_time,omitempty"`
	Creator string `json:"creator,omitempty"`
	DeploymentId string `json:"deployment_id,omitempty"`
	Mode string `json:"mode,omitempty"`
	SourceCodePath string `json:"source_code_path,omitempty"`
	UpdateTime string `json:"update_time,omitempty"`
	DeploymentArtifacts *ResourceAppActiveDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"`
	Status *ResourceAppActiveDeploymentStatus `json:"status,omitempty"`
}

type ResourceAppAppStatus struct {
	Message string `json:"message,omitempty"`
	State string `json:"state,omitempty"`
}

type ResourceAppComputeStatus struct {
	Message string `json:"message,omitempty"`
	State string `json:"state,omitempty"`
}

type ResourceAppPendingDeploymentDeploymentArtifacts struct {
	SourceCodePath string `json:"source_code_path,omitempty"`
}

type ResourceAppPendingDeploymentStatus struct {
	Message string `json:"message,omitempty"`
	State string `json:"state,omitempty"`
}

type ResourceAppPendingDeployment struct {
	CreateTime string `json:"create_time,omitempty"`
	Creator string `json:"creator,omitempty"`
	DeploymentId string `json:"deployment_id,omitempty"`
	Mode string `json:"mode,omitempty"`
	SourceCodePath string `json:"source_code_path,omitempty"`
	UpdateTime string `json:"update_time,omitempty"`
	DeploymentArtifacts *ResourceAppPendingDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"`
	Status *ResourceAppPendingDeploymentStatus `json:"status,omitempty"`
}

type ResourceAppResourceJob struct {
	Id string `json:"id"`
	Permission string `json:"permission"`
}

type ResourceAppResourceSecret struct {
	Key string `json:"key"`
	Permission string `json:"permission"`
	Scope string `json:"scope"`
}

type ResourceAppResourceServingEndpoint struct {
	Name string `json:"name"`
	Permission string `json:"permission"`
}

type ResourceAppResourceSqlWarehouse struct {
	Id string `json:"id"`
	Permission string `json:"permission"`
}

type ResourceAppResource struct {
	Description string `json:"description,omitempty"`
	Name string `json:"name"`
	Job *ResourceAppResourceJob `json:"job,omitempty"`
	Secret *ResourceAppResourceSecret `json:"secret,omitempty"`
	ServingEndpoint *ResourceAppResourceServingEndpoint `json:"serving_endpoint,omitempty"`
	SqlWarehouse *ResourceAppResourceSqlWarehouse `json:"sql_warehouse,omitempty"`
}

type ResourceApp struct {
	CreateTime string `json:"create_time,omitempty"`
	Creator string `json:"creator,omitempty"`
	DefaultSourceCodePath string `json:"default_source_code_path,omitempty"`
	Description string `json:"description,omitempty"`
	Id string `json:"id,omitempty"`
	Name string `json:"name"`
	ServicePrincipalId int `json:"service_principal_id,omitempty"`
	ServicePrincipalName string `json:"service_principal_name,omitempty"`
	UpdateTime string `json:"update_time,omitempty"`
	Updater string `json:"updater,omitempty"`
	Url string `json:"url,omitempty"`
	ActiveDeployment *ResourceAppActiveDeployment `json:"active_deployment,omitempty"`
	AppStatus *ResourceAppAppStatus `json:"app_status,omitempty"`
	ComputeStatus *ResourceAppComputeStatus `json:"compute_status,omitempty"`
	PendingDeployment *ResourceAppPendingDeployment `json:"pending_deployment,omitempty"`
	Resource []ResourceAppResource `json:"resource,omitempty"`
}

@@ -10,29 +10,30 @@ type ResourcePermissionsAccessControl struct {
}

type ResourcePermissions struct {
Authorization string `json:"authorization,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterPolicyId string `json:"cluster_policy_id,omitempty"`
DashboardId string `json:"dashboard_id,omitempty"`
DirectoryId string `json:"directory_id,omitempty"`
DirectoryPath string `json:"directory_path,omitempty"`
ExperimentId string `json:"experiment_id,omitempty"`
Id string `json:"id,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
JobId string `json:"job_id,omitempty"`
NotebookId string `json:"notebook_id,omitempty"`
NotebookPath string `json:"notebook_path,omitempty"`
ObjectType string `json:"object_type,omitempty"`
PipelineId string `json:"pipeline_id,omitempty"`
RegisteredModelId string `json:"registered_model_id,omitempty"`
RepoId string `json:"repo_id,omitempty"`
RepoPath string `json:"repo_path,omitempty"`
ServingEndpointId string `json:"serving_endpoint_id,omitempty"`
SqlAlertId string `json:"sql_alert_id,omitempty"`
SqlDashboardId string `json:"sql_dashboard_id,omitempty"`
SqlEndpointId string `json:"sql_endpoint_id,omitempty"`
SqlQueryId string `json:"sql_query_id,omitempty"`
WorkspaceFileId string `json:"workspace_file_id,omitempty"`
WorkspaceFilePath string `json:"workspace_file_path,omitempty"`
AccessControl []ResourcePermissionsAccessControl `json:"access_control,omitempty"`
Authorization string `json:"authorization,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterPolicyId string `json:"cluster_policy_id,omitempty"`
DashboardId string `json:"dashboard_id,omitempty"`
DirectoryId string `json:"directory_id,omitempty"`
DirectoryPath string `json:"directory_path,omitempty"`
ExperimentId string `json:"experiment_id,omitempty"`
Id string `json:"id,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
JobId string `json:"job_id,omitempty"`
NotebookId string `json:"notebook_id,omitempty"`
NotebookPath string `json:"notebook_path,omitempty"`
ObjectType string `json:"object_type,omitempty"`
PipelineId string `json:"pipeline_id,omitempty"`
RegisteredModelId string `json:"registered_model_id,omitempty"`
RepoId string `json:"repo_id,omitempty"`
RepoPath string `json:"repo_path,omitempty"`
ServingEndpointId string `json:"serving_endpoint_id,omitempty"`
SqlAlertId string `json:"sql_alert_id,omitempty"`
SqlDashboardId string `json:"sql_dashboard_id,omitempty"`
SqlEndpointId string `json:"sql_endpoint_id,omitempty"`
SqlQueryId string `json:"sql_query_id,omitempty"`
VectorSearchEndpointId string `json:"vector_search_endpoint_id,omitempty"`
WorkspaceFileId string `json:"workspace_file_id,omitempty"`
WorkspaceFilePath string `json:"workspace_file_path,omitempty"`
AccessControl []ResourcePermissionsAccessControl `json:"access_control,omitempty"`
}

@@ -33,8 +33,8 @@ type ResourceQualityMonitorNotificationsOnNewClassificationTagDetected struct {
}

type ResourceQualityMonitorNotifications struct {
OnFailure *ResourceQualityMonitorNotificationsOnFailure `json:"on_failure,omitempty"`
OnNewClassificationTagDetected *ResourceQualityMonitorNotificationsOnNewClassificationTagDetected `json:"on_new_classification_tag_detected,omitempty"`
OnFailure []ResourceQualityMonitorNotificationsOnFailure `json:"on_failure,omitempty"`
OnNewClassificationTagDetected []ResourceQualityMonitorNotificationsOnNewClassificationTagDetected `json:"on_new_classification_tag_detected,omitempty"`
}

type ResourceQualityMonitorSchedule struct {


@@ -52,25 +52,25 @@ type ResourceQualityMonitorTimeSeries struct {
}

type ResourceQualityMonitor struct {
	AssetsDir string `json:"assets_dir"`
	BaselineTableName string `json:"baseline_table_name,omitempty"`
	DashboardId string `json:"dashboard_id,omitempty"`
	DriftMetricsTableName string `json:"drift_metrics_table_name,omitempty"`
	Id string `json:"id,omitempty"`
	LatestMonitorFailureMsg string `json:"latest_monitor_failure_msg,omitempty"`
	MonitorVersion string `json:"monitor_version,omitempty"`
	OutputSchemaName string `json:"output_schema_name"`
	ProfileMetricsTableName string `json:"profile_metrics_table_name,omitempty"`
	SkipBuiltinDashboard bool `json:"skip_builtin_dashboard,omitempty"`
	SlicingExprs []string `json:"slicing_exprs,omitempty"`
	Status string `json:"status,omitempty"`
	TableName string `json:"table_name"`
	WarehouseId string `json:"warehouse_id,omitempty"`
	CustomMetrics []ResourceQualityMonitorCustomMetrics `json:"custom_metrics,omitempty"`
	DataClassificationConfig *ResourceQualityMonitorDataClassificationConfig `json:"data_classification_config,omitempty"`
	InferenceLog *ResourceQualityMonitorInferenceLog `json:"inference_log,omitempty"`
	Notifications *ResourceQualityMonitorNotifications `json:"notifications,omitempty"`
	Schedule *ResourceQualityMonitorSchedule `json:"schedule,omitempty"`
	Snapshot *ResourceQualityMonitorSnapshot `json:"snapshot,omitempty"`
	TimeSeries *ResourceQualityMonitorTimeSeries `json:"time_series,omitempty"`
	AssetsDir string `json:"assets_dir"`
	BaselineTableName string `json:"baseline_table_name,omitempty"`
	DashboardId string `json:"dashboard_id,omitempty"`
	DriftMetricsTableName string `json:"drift_metrics_table_name,omitempty"`
	Id string `json:"id,omitempty"`
	LatestMonitorFailureMsg string `json:"latest_monitor_failure_msg,omitempty"`
	MonitorVersion string `json:"monitor_version,omitempty"`
	OutputSchemaName string `json:"output_schema_name"`
	ProfileMetricsTableName string `json:"profile_metrics_table_name,omitempty"`
	SkipBuiltinDashboard bool `json:"skip_builtin_dashboard,omitempty"`
	SlicingExprs []string `json:"slicing_exprs,omitempty"`
	Status string `json:"status,omitempty"`
	TableName string `json:"table_name"`
	WarehouseId string `json:"warehouse_id,omitempty"`
	CustomMetrics []ResourceQualityMonitorCustomMetrics `json:"custom_metrics,omitempty"`
	DataClassificationConfig []ResourceQualityMonitorDataClassificationConfig `json:"data_classification_config,omitempty"`
	InferenceLog []ResourceQualityMonitorInferenceLog `json:"inference_log,omitempty"`
	Notifications []ResourceQualityMonitorNotifications `json:"notifications,omitempty"`
	Schedule []ResourceQualityMonitorSchedule `json:"schedule,omitempty"`
	Snapshot []ResourceQualityMonitorSnapshot `json:"snapshot,omitempty"`
	TimeSeries []ResourceQualityMonitorTimeSeries `json:"time_series,omitempty"`
}
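
Note on the pointer-to-slice change above: a slice field marshals to the array-of-objects form that Terraform's JSON configuration syntax commonly uses for repeatable nested blocks, whereas a pointer field emits a single object. The following is a minimal, self-contained sketch of that difference, not code from this patch; the `monitor` type is a stand-in for the generated struct.

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in for a generated struct whose block fields are slices, like the
// Notifications field on ResourceQualityMonitor above.
type monitor struct {
	Notifications []map[string]any `json:"notifications,omitempty"`
}

func main() {
	m := monitor{Notifications: []map[string]any{
		{"on_failure": []map[string]any{{"email_addresses": []string{"foo@bar.com"}}}},
	}}
	out, _ := json.Marshal(m)
	// Prints: {"notifications":[{"on_failure":[{"email_addresses":["foo@bar.com"]}]}]}
	fmt.Println(string(out))
}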

@@ -5,6 +5,7 @@ package schema

type Resources struct {
	AccessControlRuleSet map[string]any `json:"databricks_access_control_rule_set,omitempty"`
	Alert map[string]any `json:"databricks_alert,omitempty"`
	App map[string]any `json:"databricks_app,omitempty"`
	ArtifactAllowlist map[string]any `json:"databricks_artifact_allowlist,omitempty"`
	AutomaticClusterUpdateWorkspaceSetting map[string]any `json:"databricks_automatic_cluster_update_workspace_setting,omitempty"`
	AwsS3Mount map[string]any `json:"databricks_aws_s3_mount,omitempty"`

@@ -111,6 +112,7 @@ func NewResources() *Resources {
	return &Resources{
		AccessControlRuleSet: make(map[string]any),
		Alert: make(map[string]any),
		App: make(map[string]any),
		ArtifactAllowlist: make(map[string]any),
		AutomaticClusterUpdateWorkspaceSetting: make(map[string]any),
		AwsS3Mount: make(map[string]any),
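
For context, a minimal sketch of the JSON shape the newly added App map produces when marshaled. The `resources` type below is a trimmed-down local copy of the generated Resources struct (only the new field), and the app attributes are taken from the example bundle later in this patch; this is not code from the patch itself.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down copy of the generated Resources struct, keeping only the
// newly added databricks_app map.
type resources struct {
	App map[string]any `json:"databricks_app,omitempty"`
}

func main() {
	r := resources{App: map[string]any{
		"my_app": map[string]any{
			"name":             "my-app",
			"description":      "My App",
			"source_code_path": "./app",
		},
	}}
	out, _ := json.MarshalIndent(r, "", "  ")
	// Prints the databricks_app block keyed by the bundle resource name.
	fmt.Println(string(out))
}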

@@ -9,6 +9,7 @@ import (
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/deploy"
	"github.com/databricks/cli/bundle/deploy/apps"
	"github.com/databricks/cli/bundle/deploy/files"
	"github.com/databricks/cli/bundle/deploy/lock"
	"github.com/databricks/cli/bundle/deploy/metadata"

@@ -136,6 +137,7 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
		bundle.Seq(
			bundle.LogString("Deploying resources..."),
			terraform.Apply(),
			apps.Deploy(),
		),
		bundle.Seq(
			terraform.StatePush(),
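
apps.Deploy() is sequenced after terraform.Apply(), presumably so that the Terraform-managed app resources exist before any app-specific deployment step runs. The skeleton below only sketches the bundle.Mutator shape such a step has to satisfy; it is not the implementation from this patch, and the body is intentionally left empty.

package apps

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
)

type deploy struct{}

// Deploy returns the mutator that runs after terraform.Apply in the deploy
// sequence above. Placeholder skeleton only, not the patch's code.
func Deploy() bundle.Mutator {
	return &deploy{}
}

func (d *deploy) Name() string {
	return "apps.Deploy"
}

func (d *deploy) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	// The real mutator would act on b.Config.Resources.Apps here (for example,
	// starting or updating the deployed apps); left empty in this sketch.
	return nil
}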

@@ -37,6 +37,8 @@ func Initialize() bundle.Mutator {
		mutator.MergeJobParameters(),
		mutator.MergeJobTasks(),
		mutator.MergePipelineClusters(),
		mutator.MergeApps(),

		mutator.InitializeWorkspaceClient(),
		mutator.PopulateCurrentUser(),

@@ -0,0 +1,71 @@
bundle:
  name: apps

workspace:
  host: https://acme.cloud.databricks.com/

variables:
  app_config:
    type: complex
    default:
      command:
        - "python"
        - "app.py"
      env:
        - name: SOME_ENV_VARIABLE
          value: "Some value"

resources:
  apps:
    my_app:
      name: "my-app"
      description: "My App"
      source_code_path: ./app
      config: ${var.app_config}

      resources:
        - name: "my-sql-warehouse"
          sql_warehouse:
            id: 1234
            permission: "CAN_USE"
        - name: "my-job"
          job:
            id: 5678
            permission: "CAN_MANAGE_RUN"
      permissions:
        - user_name: "foo@bar.com"
          level: "CAN_VIEW"
        - service_principal_name: "my_sp"
          level: "CAN_MANAGE"

targets:
  default:

  development:
    variables:
      app_config:
        command:
          - "python"
          - "dev.py"
        env:
          - name: SOME_ENV_VARIABLE_2
            value: "Some value 2"
    resources:
      apps:
        my_app:
          source_code_path: ./app-dev
          resources:
            - name: "my-sql-warehouse"
              sql_warehouse:
                id: 1234
                permission: "CAN_MANAGE"
            - name: "my-job"
              job:
                id: 5678
                permission: "CAN_MANAGE"
            - name: "my-secret"
              secret:
                key: "key"
                scope: "scope"
                permission: "CAN_USE"

@@ -0,0 +1,61 @@
package config_tests

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/stretchr/testify/assert"
)

func TestApps(t *testing.T) {
	b := load(t, "./apps")
	assert.Equal(t, "apps", b.Config.Bundle.Name)

	diags := bundle.Apply(context.Background(), b,
		bundle.Seq(
			mutator.SetVariables(),
			mutator.ResolveVariableReferences("variables"),
		))
	assert.Empty(t, diags)

	app := b.Config.Resources.Apps["my_app"]
	assert.Equal(t, "my-app", app.Name)
	assert.Equal(t, "My App", app.Description)
	assert.Equal(t, []interface{}{"python", "app.py"}, app.Config["command"])
	assert.Equal(t, []interface{}{map[string]interface{}{"name": "SOME_ENV_VARIABLE", "value": "Some value"}}, app.Config["env"])

	assert.Len(t, app.Resources, 2)
	assert.Equal(t, "1234", app.Resources[0].SqlWarehouse.Id)
	assert.Equal(t, "CAN_USE", string(app.Resources[0].SqlWarehouse.Permission))
	assert.Equal(t, "5678", app.Resources[1].Job.Id)
	assert.Equal(t, "CAN_MANAGE_RUN", string(app.Resources[1].Job.Permission))
}

func TestAppsOverride(t *testing.T) {
	b := loadTarget(t, "./apps", "development")
	assert.Equal(t, "apps", b.Config.Bundle.Name)

	diags := bundle.Apply(context.Background(), b,
		bundle.Seq(
			mutator.SetVariables(),
			mutator.ResolveVariableReferences("variables"),
		))
	assert.Empty(t, diags)

	app := b.Config.Resources.Apps["my_app"]
	assert.Equal(t, "my-app", app.Name)
	assert.Equal(t, "My App", app.Description)
	assert.Equal(t, []interface{}{"python", "dev.py"}, app.Config["command"])
	assert.Equal(t, []interface{}{map[string]interface{}{"name": "SOME_ENV_VARIABLE_2", "value": "Some value 2"}}, app.Config["env"])

	assert.Len(t, app.Resources, 3)
	assert.Equal(t, "1234", app.Resources[0].SqlWarehouse.Id)
	assert.Equal(t, "CAN_MANAGE", string(app.Resources[0].SqlWarehouse.Permission))
	assert.Equal(t, "5678", app.Resources[1].Job.Id)
	assert.Equal(t, "CAN_MANAGE", string(app.Resources[1].Job.Permission))
	assert.Equal(t, "key", app.Resources[2].Secret.Key)
	assert.Equal(t, "scope", app.Resources[2].Secret.Scope)
	assert.Equal(t, "CAN_USE", string(app.Resources[2].Secret.Permission))
}

@@ -46,6 +46,7 @@ func loadTargetWithDiags(path, env string) (*bundle.Bundle, diag.Diagnostics) {
		mutator.MergeJobParameters(),
		mutator.MergeJobTasks(),
		mutator.MergePipelineClusters(),
		mutator.MergeApps(),
	))
	return b, diags
}