Use `dyn.Value` as input to generating Terraform JSON (#1218)

## Changes

This builds on #1098 and uses the `dyn.Value` representation of the
bundle configuration to generate the Terraform JSON definition of
resources in the bundle.

The existing code (in `BundleToTerraform`) was not great and in an
effort to slightly improve this, I added a package `tfdyn` that includes
dedicated files for each resource type. Every resource type has its own
conversion type that takes the `dyn.Value` of the bundle-side resource
and converts it into Terraform resources (e.g. a job and optionally its
permissions).

Because we now use a `dyn.Value` as input, we can represent and emit
zero-values that have so far been omitted. For example, setting
`num_workers: 0` in your bundle configuration now propagates all the way
to the Terraform JSON definition.

## Tests

* Unit tests for every converter. I reused the test inputs from
`convert_test.go`.
* Equivalence tests in every existing test case check that the
resulting JSON is identical.
* I manually compared the TF JSON file generated by the CLI from the
main branch and from this PR on all of our bundles and bundle examples
(internal and external) and found the output doesn't change (with the
exception of the odd zero-value being included by the version in this
PR).
This commit is contained in:
Pieter Noordhuis 2024-02-16 21:54:38 +01:00 committed by GitHub
parent 87dd46a3f8
commit f70ec359dc
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
22 changed files with 1291 additions and 1 deletions

View File

@ -1,13 +1,16 @@
package terraform
import (
"context"
"encoding/json"
"fmt"
"reflect"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/deploy/terraform/tfdyn"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
tfjson "github.com/hashicorp/terraform-json"
)
@ -228,6 +231,64 @@ func BundleToTerraform(config *config.Root) *schema.Root {
return tfroot
}
// BundleToTerraformWithDynValue converts resources in a bundle configuration
// to the equivalent Terraform JSON representation.
//
// It takes the dyn.Value of the full bundle configuration root and walks its
// "resources" tree. Each second-level node (resources.<type>.<key>) is
// dispatched to the converter registered for its resource type, which writes
// the Terraform resource (and any associated permissions/grants) into the
// returned root. Returns an error if a resource type has no registered
// converter or if a conversion fails.
func BundleToTerraformWithDynValue(ctx context.Context, root dyn.Value) (*schema.Root, error) {
	tfroot := schema.NewRoot()
	tfroot.Provider = schema.NewProviders()

	// Convert each resource in the bundle to the equivalent Terraform representation.
	resources, err := dyn.Get(root, "resources")
	if err != nil {
		// If the resources key is missing, return an empty root.
		if dyn.IsNoSuchKeyError(err) {
			return tfroot, nil
		}
		return nil, err
	}

	tfroot.Resource = schema.NewResources()
	numResources := 0
	_, err = dyn.Walk(resources, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
		// Keep descending until the path is at least resources.<type>.<key>.
		if len(p) < 2 {
			return v, nil
		}

		// p[0] is the resource type (e.g. "jobs"), p[1] the resource key.
		typ := p[0].Key()
		key := p[1].Key()

		// Lookup the converter based on the resource type.
		c, ok := tfdyn.GetConverter(typ)
		if !ok {
			return dyn.InvalidValue, fmt.Errorf("no converter for resource type %s", typ)
		}

		// Convert resource to Terraform representation.
		err := c.Convert(ctx, key, v, tfroot.Resource)
		if err != nil {
			return dyn.InvalidValue, err
		}

		numResources++

		// Skip traversal of the resource itself.
		return v, dyn.ErrSkip
	})
	if err != nil {
		return nil, err
	}

	// We explicitly set "resource" to nil to omit it from a JSON encoding.
	// This is required because the terraform CLI requires >= 1 resources defined
	// if the "resource" property is used in a .tf.json file.
	if numResources == 0 {
		tfroot.Resource = nil
	}

	return tfroot, nil
}
func TerraformToBundle(state *tfjson.State, config *config.Root) error {
if state.Values != nil && state.Values.RootModule != nil {
for _, resource := range state.Values.RootModule.Resources {

View File

@ -1,12 +1,16 @@
package terraform
import (
"context"
"encoding/json"
"reflect"
"testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
@ -65,6 +69,8 @@ func TestBundleToTerraformJob(t *testing.T) {
assert.Equal(t, "param1", resource.Parameter[0].Name)
assert.Equal(t, "param2", resource.Parameter[1].Name)
assert.Nil(t, out.Data)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformJobPermissions(t *testing.T) {
@ -92,6 +98,8 @@ func TestBundleToTerraformJobPermissions(t *testing.T) {
assert.Len(t, resource.AccessControl, 1)
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_VIEW", resource.AccessControl[0].PermissionLevel)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
@ -128,6 +136,8 @@ func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
require.Len(t, resource.Task, 1)
require.Len(t, resource.Task[0].Library, 1)
assert.Equal(t, "mlflow", resource.Task[0].Library[0].Pypi.Package)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformPipeline(t *testing.T) {
@ -188,6 +198,8 @@ func TestBundleToTerraformPipeline(t *testing.T) {
assert.Equal(t, resource.Notification[1].Alerts, []string{"on-update-failure", "on-flow-failure"})
assert.Equal(t, resource.Notification[1].EmailRecipients, []string{"jane@doe.com", "john@doe.com"})
assert.Nil(t, out.Data)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformPipelinePermissions(t *testing.T) {
@ -215,6 +227,8 @@ func TestBundleToTerraformPipelinePermissions(t *testing.T) {
assert.Len(t, resource.AccessControl, 1)
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_VIEW", resource.AccessControl[0].PermissionLevel)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformModel(t *testing.T) {
@ -254,10 +268,15 @@ func TestBundleToTerraformModel(t *testing.T) {
assert.Equal(t, "k2", resource.Tags[1].Key)
assert.Equal(t, "v2", resource.Tags[1].Value)
assert.Nil(t, out.Data)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformModelPermissions(t *testing.T) {
var src = resources.MlflowModel{
Model: &ml.Model{
Name: "name",
},
Permissions: []resources.Permission{
{
Level: "CAN_READ",
@ -281,6 +300,8 @@ func TestBundleToTerraformModelPermissions(t *testing.T) {
assert.Len(t, resource.AccessControl, 1)
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_READ", resource.AccessControl[0].PermissionLevel)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformExperiment(t *testing.T) {
@ -303,10 +324,15 @@ func TestBundleToTerraformExperiment(t *testing.T) {
assert.Equal(t, "name", resource.Name)
assert.Nil(t, out.Data)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformExperimentPermissions(t *testing.T) {
var src = resources.MlflowExperiment{
Experiment: &ml.Experiment{
Name: "name",
},
Permissions: []resources.Permission{
{
Level: "CAN_READ",
@ -331,6 +357,7 @@ func TestBundleToTerraformExperimentPermissions(t *testing.T) {
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_READ", resource.AccessControl[0].PermissionLevel)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformModelServing(t *testing.T) {
@ -377,10 +404,15 @@ func TestBundleToTerraformModelServing(t *testing.T) {
assert.Equal(t, "model_name-1", resource.Config.TrafficConfig.Routes[0].ServedModelName)
assert.Equal(t, 100, resource.Config.TrafficConfig.Routes[0].TrafficPercentage)
assert.Nil(t, out.Data)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformModelServingPermissions(t *testing.T) {
var src = resources.ModelServingEndpoint{
CreateServingEndpoint: &serving.CreateServingEndpoint{
Name: "name",
},
Permissions: []resources.Permission{
{
Level: "CAN_VIEW",
@ -405,6 +437,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_VIEW", resource.AccessControl[0].PermissionLevel)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformRegisteredModel(t *testing.T) {
@ -433,10 +466,17 @@ func TestBundleToTerraformRegisteredModel(t *testing.T) {
assert.Equal(t, "schema", resource.SchemaName)
assert.Equal(t, "comment", resource.Comment)
assert.Nil(t, out.Data)
bundleToTerraformEquivalenceTest(t, &config)
}
func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
var src = resources.RegisteredModel{
CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
Name: "name",
CatalogName: "catalog",
SchemaName: "schema",
},
Grants: []resources.Grant{
{
Privileges: []string{"EXECUTE"},
@ -460,6 +500,8 @@ func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
assert.Len(t, resource.Grant, 1)
assert.Equal(t, "jane@doe.com", resource.Grant[0].Principal)
assert.Equal(t, "EXECUTE", resource.Grant[0].Privileges[0])
bundleToTerraformEquivalenceTest(t, &config)
}
func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
@ -827,3 +869,25 @@ func AssertFullResourceCoverage(t *testing.T, config *config.Root) {
}
}
}
// assertEqualTerraformRoot marshals both Terraform roots to JSON and asserts
// that the resulting documents are semantically equal.
func assertEqualTerraformRoot(t *testing.T, a, b *schema.Root) {
	aj, err := json.Marshal(a)
	require.NoError(t, err)
	bj, err := json.Marshal(b)
	require.NoError(t, err)
	assert.JSONEq(t, string(aj), string(bj))
}
// bundleToTerraformEquivalenceTest converts the given configuration through
// both conversion paths (the typed BundleToTerraform and the dyn.Value-based
// BundleToTerraformWithDynValue) and asserts that they produce identical
// Terraform JSON.
func bundleToTerraformEquivalenceTest(t *testing.T, config *config.Root) {
	t.Run("dyn equivalence", func(t *testing.T) {
		// Typed conversion path.
		expected := BundleToTerraform(config)

		// Dynamic conversion path.
		vin, err := convert.FromTyped(config, dyn.NilValue)
		require.NoError(t, err)
		actual, err := BundleToTerraformWithDynValue(context.Background(), vin)
		require.NoError(t, err)

		// Compare roots
		assertEqualTerraformRoot(t, expected, actual)
	})
}

View File

@ -0,0 +1,23 @@
package tfdyn
import (
"context"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
)
// Converter is implemented once per bundle resource type. Convert takes the
// dyn.Value of a single bundle resource identified by key and adds its
// Terraform representation (and, optionally, associated permissions or
// grants) to the output resources.
type Converter interface {
	Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error
}

// converters maps bundle resource type names (e.g. "jobs") to their
// converter. It is populated by registerConverter calls from the init
// functions of the per-resource files in this package.
var converters = map[string]Converter{}

// GetConverter returns the converter registered for the given resource type
// name and reports whether one was registered.
func GetConverter(name string) (Converter, bool) {
	c, ok := converters[name]
	return c, ok
}

// registerConverter registers a converter under the given resource type name.
// It panics on duplicate registration: that indicates a programmer error in
// the per-resource init functions, mirroring stdlib registry conventions
// (e.g. database/sql.Register).
func registerConverter(name string, c Converter) {
	if _, ok := converters[name]; ok {
		panic("tfdyn: duplicate converter registration for resource type " + name)
	}
	converters[name] = c
}

View File

@ -0,0 +1,45 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
)
// convertExperimentResource normalizes the dynamic representation of an
// MLflow experiment against the Terraform databricks_mlflow_experiment
// schema. Normalization diagnostics are logged at debug level and do not
// abort the conversion.
func convertExperimentResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	v, diags := convert.Normalize(schema.ResourceMlflowExperiment{}, vin)
	for _, d := range diags {
		log.Debugf(ctx, "experiment normalization diagnostic: %s", d.Summary)
	}
	return v, nil
}
// experimentConverter converts bundle "experiments" resources.
type experimentConverter struct{}

// Convert converts a single experiment and registers it (plus optional
// permissions) in the output resources under the given key.
func (experimentConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	v, err := convertExperimentResource(ctx, vin)
	if err != nil {
		return err
	}

	// Register the converted experiment under its resource key.
	out.MlflowExperiment[key] = v.AsAny()

	// Attach permissions, if any were defined on the bundle resource.
	if perms := convertPermissionsResource(ctx, vin); perms != nil {
		perms.ExperimentId = fmt.Sprintf("${databricks_mlflow_experiment.%s.id}", key)
		out.Permissions["mlflow_experiment_"+key] = perms
	}

	return nil
}

func init() {
	registerConverter("experiments", experimentConverter{})
}

View File

@ -0,0 +1,52 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/ml"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertExperiment verifies that an MLflow experiment resource and its
// permissions convert to the expected Terraform representation.
func TestConvertExperiment(t *testing.T) {
	var src = resources.MlflowExperiment{
		Experiment: &ml.Experiment{
			Name: "name",
		},
		Permissions: []resources.Permission{
			{
				Level:    "CAN_READ",
				UserName: "jane@doe.com",
			},
		},
	}

	// Convert the typed resource to its dynamic representation.
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = experimentConverter{}.Convert(ctx, "my_experiment", vin, out)
	require.NoError(t, err)

	// Assert equality on the experiment
	assert.Equal(t, map[string]any{
		"name": "name",
	}, out.MlflowExperiment["my_experiment"])

	// Assert equality on the permissions
	assert.Equal(t, &schema.ResourcePermissions{
		ExperimentId: "${databricks_mlflow_experiment.my_experiment.id}",
		AccessControl: []schema.ResourcePermissionsAccessControl{
			{
				PermissionLevel: "CAN_READ",
				UserName:        "jane@doe.com",
			},
		},
	}, out.Permissions["mlflow_experiment_my_experiment"])
}

View File

@ -0,0 +1,39 @@
package tfdyn
import (
"context"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
)
// convertGrantsResource reads the "grants" sequence off the given bundle
// resource value and converts it into a databricks_grants resource.
// It returns nil when the resource defines no grants. Privilege entries that
// are not strings are silently skipped.
func convertGrantsResource(ctx context.Context, vin dyn.Value) *schema.ResourceGrants {
	seq, ok := vin.Get("grants").AsSequence()
	if !ok || len(seq) == 0 {
		return nil
	}

	out := &schema.ResourceGrants{}
	for _, entry := range seq {
		principal, _ := entry.Get("principal").AsString()

		// Collect the privileges for this principal as strings.
		var privileges []string
		elems, _ := entry.Get("privileges").AsSequence()
		for _, elem := range elems {
			if s, ok := elem.AsString(); ok {
				privileges = append(privileges, s)
			}
		}

		out.Grant = append(out.Grant, schema.ResourceGrantsGrant{
			Principal:  principal,
			Privileges: privileges,
		})
	}
	return out
}

View File

@ -0,0 +1,71 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertGrants verifies that the grants defined on a bundle resource
// are converted into the expected databricks_grants entries.
func TestConvertGrants(t *testing.T) {
	var src = resources.RegisteredModel{
		Grants: []resources.Grant{
			{
				Privileges: []string{"EXECUTE", "FOO"},
				Principal:  "jane@doe.com",
			},
			{
				Privileges: []string{"EXECUTE", "BAR"},
				Principal:  "spn",
			},
		},
	}

	// Convert the typed resource to its dynamic representation.
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	resource := convertGrantsResource(ctx, vin)
	require.NotNil(t, resource)

	// Both grant entries must survive conversion verbatim.
	assert.Equal(t, []schema.ResourceGrantsGrant{
		{
			Privileges: []string{"EXECUTE", "FOO"},
			Principal:  "jane@doe.com",
		},
		{
			Privileges: []string{"EXECUTE", "BAR"},
			Principal:  "spn",
		},
	}, resource.Grant)
}
// TestConvertGrantsNil verifies that conversion yields nil for a resource
// with a nil grants slice.
func TestConvertGrantsNil(t *testing.T) {
	src := resources.RegisteredModel{
		Grants: nil,
	}

	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	assert.Nil(t, convertGrantsResource(context.Background(), vin))
}
// TestConvertGrantsEmpty verifies that conversion yields nil for a resource
// with an empty (non-nil) grants slice.
func TestConvertGrantsEmpty(t *testing.T) {
	src := resources.RegisteredModel{
		Grants: []resources.Grant{},
	}

	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	assert.Nil(t, convertGrantsResource(context.Background(), vin))
}

View File

@ -0,0 +1,87 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/service/jobs"
)
// convertJobResource converts the dyn.Value of a bundle job resource into
// the shape expected by the Terraform databricks_job resource.
//
// Conversion happens in four steps:
//  1. Normalize against the SDK's jobs.JobSettings to drop superfluous keys.
//  2. Rename plural top-level keys to the singular block names Terraform uses.
//  3. Rename keys inside the "git_source" and "task" blocks.
//  4. Normalize against the Terraform schema.ResourceJob.
//
// Normalization diagnostics are logged at debug level and are not fatal.
func convertJobResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	// Normalize the input value to the underlying job schema.
	// This removes superfluous keys and adapts the input to the expected schema.
	vin, diags := convert.Normalize(jobs.JobSettings{}, vin)
	for _, diag := range diags {
		log.Debugf(ctx, "job normalization diagnostic: %s", diag.Summary)
	}

	// Modify top-level keys: Terraform uses singular block names.
	vout, err := renameKeys(vin, map[string]string{
		"tasks":        "task",
		"job_clusters": "job_cluster",
		"parameters":   "parameter",
	})
	if err != nil {
		return dyn.InvalidValue, err
	}

	// Modify keys in the "git_source" block: Terraform drops the "git_" prefix.
	vout, err = dyn.Map(vout, "git_source", func(v dyn.Value) (dyn.Value, error) {
		return renameKeys(v, map[string]string{
			"git_branch":   "branch",
			"git_commit":   "commit",
			"git_provider": "provider",
			"git_tag":      "tag",
			"git_url":      "url",
		})
	})
	if err != nil {
		return dyn.InvalidValue, err
	}

	// Modify keys in the "task" blocks.
	vout, err = dyn.Map(vout, "task", dyn.Foreach(func(v dyn.Value) (dyn.Value, error) {
		return renameKeys(v, map[string]string{
			"libraries": "library",
		})
	}))
	if err != nil {
		return dyn.InvalidValue, err
	}

	// Normalize the output value to the target schema.
	vout, diags = convert.Normalize(schema.ResourceJob{}, vout)
	for _, diag := range diags {
		log.Debugf(ctx, "job normalization diagnostic: %s", diag.Summary)
	}

	// Return nil explicitly: the previous `err` was already checked above and
	// returning the stale variable obscures that this is the success path.
	return vout, nil
}
// jobConverter converts bundle "jobs" resources.
type jobConverter struct{}

// Convert converts a single job and registers it (plus optional permissions)
// in the output resources under the given key.
func (jobConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	v, err := convertJobResource(ctx, vin)
	if err != nil {
		return err
	}

	// Register the converted job under its resource key.
	out.Job[key] = v.AsAny()

	// Attach permissions, if any were defined on the bundle resource.
	if perms := convertPermissionsResource(ctx, vin); perms != nil {
		perms.JobId = fmt.Sprintf("${databricks_job.%s.id}", key)
		out.Permissions["job_"+key] = perms
	}

	return nil
}

func init() {
	registerConverter("jobs", jobConverter{})
}

View File

@ -0,0 +1,129 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertJob verifies the full job conversion pipeline: key renames
// (tasks -> task, job_clusters -> job_cluster, parameters -> parameter,
// git_* -> unprefixed, libraries -> library) and permission attachment.
func TestConvertJob(t *testing.T) {
	var src = resources.Job{
		JobSettings: &jobs.JobSettings{
			Name: "my job",
			JobClusters: []jobs.JobCluster{
				{
					JobClusterKey: "key",
					NewCluster: &compute.ClusterSpec{
						SparkVersion: "10.4.x-scala2.12",
					},
				},
			},
			GitSource: &jobs.GitSource{
				GitProvider: jobs.GitProviderGitHub,
				GitUrl:      "https://github.com/foo/bar",
			},
			Parameters: []jobs.JobParameterDefinition{
				{
					Name:    "param1",
					Default: "default1",
				},
				{
					Name:    "param2",
					Default: "default2",
				},
			},
			Tasks: []jobs.Task{
				{
					TaskKey:       "task_key",
					JobClusterKey: "job_cluster_key",
					Libraries: []compute.Library{
						{
							Pypi: &compute.PythonPyPiLibrary{
								Package: "package",
							},
						},
						{
							Whl: "/path/to/my.whl",
						},
					},
				},
			},
		},
		Permissions: []resources.Permission{
			{
				Level:    "CAN_VIEW",
				UserName: "jane@doe.com",
			},
		},
	}

	// Convert the typed resource to its dynamic representation.
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = jobConverter{}.Convert(ctx, "my_job", vin, out)
	require.NoError(t, err)

	// Assert equality on the job
	assert.Equal(t, map[string]any{
		"name": "my job",
		"job_cluster": []any{
			map[string]any{
				"job_cluster_key": "key",
				"new_cluster": map[string]any{
					"spark_version": "10.4.x-scala2.12",
				},
			},
		},
		"git_source": map[string]any{
			"provider": "gitHub",
			"url":      "https://github.com/foo/bar",
		},
		"parameter": []any{
			map[string]any{
				"name":    "param1",
				"default": "default1",
			},
			map[string]any{
				"name":    "param2",
				"default": "default2",
			},
		},
		"task": []any{
			map[string]any{
				"task_key":        "task_key",
				"job_cluster_key": "job_cluster_key",
				"library": []any{
					map[string]any{
						"pypi": map[string]any{
							"package": "package",
						},
					},
					map[string]any{
						"whl": "/path/to/my.whl",
					},
				},
			},
		},
	}, out.Job["my_job"])

	// Assert equality on the permissions
	assert.Equal(t, &schema.ResourcePermissions{
		JobId: "${databricks_job.my_job.id}",
		AccessControl: []schema.ResourcePermissionsAccessControl{
			{
				PermissionLevel: "CAN_VIEW",
				UserName:        "jane@doe.com",
			},
		},
	}, out.Permissions["job_my_job"])
}

View File

@ -0,0 +1,45 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
)
// convertModelResource normalizes the dynamic representation of an MLflow
// model against the Terraform databricks_mlflow_model schema. Normalization
// diagnostics are logged at debug level and do not abort the conversion.
func convertModelResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	v, diags := convert.Normalize(schema.ResourceMlflowModel{}, vin)
	for _, d := range diags {
		log.Debugf(ctx, "model normalization diagnostic: %s", d.Summary)
	}
	return v, nil
}
// modelConverter converts bundle "models" resources.
type modelConverter struct{}

// Convert converts a single MLflow model and registers it (plus optional
// permissions) in the output resources under the given key.
func (modelConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	v, err := convertModelResource(ctx, vin)
	if err != nil {
		return err
	}

	// Register the converted model under its resource key.
	out.MlflowModel[key] = v.AsAny()

	// Attach permissions, if any were defined on the bundle resource.
	if perms := convertPermissionsResource(ctx, vin); perms != nil {
		perms.RegisteredModelId = fmt.Sprintf("${databricks_mlflow_model.%s.registered_model_id}", key)
		out.Permissions["mlflow_model_"+key] = perms
	}

	return nil
}

func init() {
	registerConverter("models", modelConverter{})
}

View File

@ -0,0 +1,45 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
)
// convertModelServingEndpointResource normalizes the dynamic representation
// of a model serving endpoint against the Terraform databricks_model_serving
// schema. Normalization diagnostics are logged at debug level and do not
// abort the conversion.
func convertModelServingEndpointResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	v, diags := convert.Normalize(schema.ResourceModelServing{}, vin)
	for _, d := range diags {
		log.Debugf(ctx, "model serving endpoint normalization diagnostic: %s", d.Summary)
	}
	return v, nil
}
// modelServingEndpointConverter converts bundle "model_serving_endpoints"
// resources.
type modelServingEndpointConverter struct{}

// Convert converts a single model serving endpoint and registers it (plus
// optional permissions) in the output resources under the given key.
func (modelServingEndpointConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	v, err := convertModelServingEndpointResource(ctx, vin)
	if err != nil {
		return err
	}

	// Register the converted endpoint under its resource key.
	out.ModelServing[key] = v.AsAny()

	// Attach permissions, if any were defined on the bundle resource.
	if perms := convertPermissionsResource(ctx, vin); perms != nil {
		perms.ServingEndpointId = fmt.Sprintf("${databricks_model_serving.%s.serving_endpoint_id}", key)
		out.Permissions["model_serving_"+key] = perms
	}

	return nil
}

func init() {
	registerConverter("model_serving_endpoints", modelServingEndpointConverter{})
}

View File

@ -0,0 +1,88 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/serving"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertModelServingEndpoint verifies that a model serving endpoint
// resource (config, traffic config) and its permissions convert to the
// expected Terraform representation.
func TestConvertModelServingEndpoint(t *testing.T) {
	var src = resources.ModelServingEndpoint{
		CreateServingEndpoint: &serving.CreateServingEndpoint{
			Name: "name",
			Config: serving.EndpointCoreConfigInput{
				ServedModels: []serving.ServedModelInput{
					{
						ModelName:          "model_name",
						ModelVersion:       "1",
						ScaleToZeroEnabled: true,
						WorkloadSize:       "Small",
					},
				},
				TrafficConfig: &serving.TrafficConfig{
					Routes: []serving.Route{
						{
							ServedModelName:   "model_name-1",
							TrafficPercentage: 100,
						},
					},
				},
			},
		},
		Permissions: []resources.Permission{
			{
				Level:    "CAN_VIEW",
				UserName: "jane@doe.com",
			},
		},
	}

	// Convert the typed resource to its dynamic representation.
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = modelServingEndpointConverter{}.Convert(ctx, "my_model_serving_endpoint", vin, out)
	require.NoError(t, err)

	// Assert equality on the model serving endpoint
	assert.Equal(t, map[string]any{
		"name": "name",
		"config": map[string]any{
			"served_models": []any{
				map[string]any{
					"model_name":            "model_name",
					"model_version":         "1",
					"scale_to_zero_enabled": true,
					"workload_size":         "Small",
				},
			},
			"traffic_config": map[string]any{
				"routes": []any{
					map[string]any{
						"served_model_name":  "model_name-1",
						"traffic_percentage": int64(100),
					},
				},
			},
		},
	}, out.ModelServing["my_model_serving_endpoint"])

	// Assert equality on the permissions
	assert.Equal(t, &schema.ResourcePermissions{
		ServingEndpointId: "${databricks_model_serving.my_model_serving_endpoint.serving_endpoint_id}",
		AccessControl: []schema.ResourcePermissionsAccessControl{
			{
				PermissionLevel: "CAN_VIEW",
				UserName:        "jane@doe.com",
			},
		},
	}, out.Permissions["model_serving_my_model_serving_endpoint"])
}

View File

@ -0,0 +1,74 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/ml"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertModel verifies that an MLflow model resource (name, description,
// tags) and its permissions convert to the expected Terraform representation.
func TestConvertModel(t *testing.T) {
	var src = resources.MlflowModel{
		Model: &ml.Model{
			Name:        "name",
			Description: "description",
			Tags: []ml.ModelTag{
				{
					Key:   "k1",
					Value: "v1",
				},
				{
					Key:   "k2",
					Value: "v2",
				},
			},
		},
		Permissions: []resources.Permission{
			{
				Level:    "CAN_READ",
				UserName: "jane@doe.com",
			},
		},
	}

	// Convert the typed resource to its dynamic representation.
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = modelConverter{}.Convert(ctx, "my_model", vin, out)
	require.NoError(t, err)

	// Assert equality on the model
	assert.Equal(t, map[string]any{
		"name":        "name",
		"description": "description",
		"tags": []any{
			map[string]any{
				"key":   "k1",
				"value": "v1",
			},
			map[string]any{
				"key":   "k2",
				"value": "v2",
			},
		},
	}, out.MlflowModel["my_model"])

	// Assert equality on the permissions
	assert.Equal(t, &schema.ResourcePermissions{
		RegisteredModelId: "${databricks_mlflow_model.my_model.registered_model_id}",
		AccessControl: []schema.ResourcePermissionsAccessControl{
			{
				PermissionLevel: "CAN_READ",
				UserName:        "jane@doe.com",
			},
		},
	}, out.Permissions["mlflow_model_my_model"])
}

View File

@ -0,0 +1,32 @@
package tfdyn
import (
"context"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
)
// convertPermissionsResource reads the "permissions" sequence off the given
// bundle resource value and converts it into a databricks_permissions
// resource. It returns nil when the resource defines no permissions.
// Missing fields in a permission entry default to the empty string.
func convertPermissionsResource(ctx context.Context, vin dyn.Value) *schema.ResourcePermissions {
	seq, ok := vin.Get("permissions").AsSequence()
	if !ok || len(seq) == 0 {
		return nil
	}

	out := &schema.ResourcePermissions{}
	for _, entry := range seq {
		var ac schema.ResourcePermissionsAccessControl
		ac.PermissionLevel, _ = entry.Get("level").AsString()
		ac.UserName, _ = entry.Get("user_name").AsString()
		ac.GroupName, _ = entry.Get("group_name").AsString()
		ac.ServicePrincipalName, _ = entry.Get("service_principal_name").AsString()
		out.AccessControl = append(out.AccessControl, ac)
	}
	return out
}

View File

@ -0,0 +1,85 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertPermissions verifies that user, group, and service principal
// permissions all convert to the expected access control entries.
func TestConvertPermissions(t *testing.T) {
	var src = resources.Job{
		Permissions: []resources.Permission{
			{
				Level:    "CAN_VIEW",
				UserName: "jane@doe.com",
			},
			{
				Level:     "CAN_MANAGE",
				GroupName: "special admins",
			},
			{
				Level:                "CAN_RUN",
				ServicePrincipalName: "spn",
			},
		},
	}

	// Convert the typed resource to its dynamic representation.
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	resource := convertPermissionsResource(ctx, vin)
	require.NotNil(t, resource)

	// Unset principal fields must come through as empty strings.
	assert.Equal(t, []schema.ResourcePermissionsAccessControl{
		{
			PermissionLevel:      "CAN_VIEW",
			UserName:             "jane@doe.com",
			GroupName:            "",
			ServicePrincipalName: "",
		},
		{
			PermissionLevel:      "CAN_MANAGE",
			UserName:             "",
			GroupName:            "special admins",
			ServicePrincipalName: "",
		},
		{
			PermissionLevel:      "CAN_RUN",
			UserName:             "",
			GroupName:            "",
			ServicePrincipalName: "spn",
		},
	}, resource.AccessControl)
}
// TestConvertPermissionsNil verifies that conversion yields nil for a
// resource with a nil permissions slice.
func TestConvertPermissionsNil(t *testing.T) {
	src := resources.Job{
		Permissions: nil,
	}

	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	assert.Nil(t, convertPermissionsResource(context.Background(), vin))
}
// TestConvertPermissionsEmpty verifies that conversion yields nil for a
// resource with an empty (non-nil) permissions slice.
func TestConvertPermissionsEmpty(t *testing.T) {
	src := resources.Job{
		Permissions: []resources.Permission{},
	}

	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	assert.Nil(t, convertPermissionsResource(context.Background(), vin))
}

View File

@ -0,0 +1,55 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
)
// convertPipelineResource converts the dyn.Value of a bundle pipeline
// resource into the shape expected by the Terraform databricks_pipeline
// resource: plural top-level keys are renamed to Terraform's singular block
// names, then the value is normalized against schema.ResourcePipeline.
// Normalization diagnostics are logged at debug level and are not fatal.
func convertPipelineResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	// Modify top-level keys: Terraform uses singular block names.
	vout, err := renameKeys(vin, map[string]string{
		"libraries":     "library",
		"clusters":      "cluster",
		"notifications": "notification",
	})
	if err != nil {
		return dyn.InvalidValue, err
	}

	// Normalize the output value to the target schema.
	vout, diags := convert.Normalize(schema.ResourcePipeline{}, vout)
	for _, diag := range diags {
		log.Debugf(ctx, "pipeline normalization diagnostic: %s", diag.Summary)
	}

	// Return nil explicitly: the previous `err` was already checked above and
	// returning the stale variable obscures that this is the success path.
	return vout, nil
}
// pipelineConverter converts bundle "pipelines" resources.
type pipelineConverter struct{}

// Convert converts a single pipeline and registers it (plus optional
// permissions) in the output resources under the given key.
func (pipelineConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	v, err := convertPipelineResource(ctx, vin)
	if err != nil {
		return err
	}

	// Register the converted pipeline under its resource key.
	out.Pipeline[key] = v.AsAny()

	// Attach permissions, if any were defined on the bundle resource.
	if perms := convertPermissionsResource(ctx, vin); perms != nil {
		perms.PipelineId = fmt.Sprintf("${databricks_pipeline.%s.id}", key)
		out.Permissions["pipeline_"+key] = perms
	}

	return nil
}

func init() {
	registerConverter("pipelines", pipelineConverter{})
}

View File

@ -0,0 +1,128 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertPipeline verifies the pipeline conversion: key renames
// (libraries -> library, clusters -> cluster, notifications -> notification)
// and permission attachment.
func TestConvertPipeline(t *testing.T) {
	var src = resources.Pipeline{
		PipelineSpec: &pipelines.PipelineSpec{
			Name: "my pipeline",
			Libraries: []pipelines.PipelineLibrary{
				{
					Notebook: &pipelines.NotebookLibrary{
						Path: "notebook path",
					},
				},
				{
					File: &pipelines.FileLibrary{
						Path: "file path",
					},
				},
			},
			Notifications: []pipelines.Notifications{
				{
					Alerts: []string{
						"on-update-fatal-failure",
					},
					EmailRecipients: []string{
						"jane@doe.com",
					},
				},
				{
					Alerts: []string{
						"on-update-failure",
						"on-flow-failure",
					},
					EmailRecipients: []string{
						"jane@doe.com",
						"john@doe.com",
					},
				},
			},
			Clusters: []pipelines.PipelineCluster{
				{
					Label:      "default",
					NumWorkers: 1,
				},
			},
		},
		Permissions: []resources.Permission{
			{
				Level:    "CAN_VIEW",
				UserName: "jane@doe.com",
			},
		},
	}

	// Convert the typed resource to its dynamic representation.
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = pipelineConverter{}.Convert(ctx, "my_pipeline", vin, out)
	require.NoError(t, err)

	// Assert equality on the pipeline
	assert.Equal(t, map[string]any{
		"name": "my pipeline",
		"library": []any{
			map[string]any{
				"notebook": map[string]any{
					"path": "notebook path",
				},
			},
			map[string]any{
				"file": map[string]any{
					"path": "file path",
				},
			},
		},
		"notification": []any{
			map[string]any{
				"alerts": []any{
					"on-update-fatal-failure",
				},
				"email_recipients": []any{
					"jane@doe.com",
				},
			},
			map[string]any{
				"alerts": []any{
					"on-update-failure",
					"on-flow-failure",
				},
				"email_recipients": []any{
					"jane@doe.com",
					"john@doe.com",
				},
			},
		},
		"cluster": []any{
			map[string]any{
				"label":       "default",
				"num_workers": int64(1),
			},
		},
	}, out.Pipeline["my_pipeline"])

	// Assert equality on the permissions
	assert.Equal(t, &schema.ResourcePermissions{
		PipelineId: "${databricks_pipeline.my_pipeline.id}",
		AccessControl: []schema.ResourcePermissionsAccessControl{
			{
				PermissionLevel: "CAN_VIEW",
				UserName:        "jane@doe.com",
			},
		},
	}, out.Permissions["pipeline_my_pipeline"])
}

View File

@ -0,0 +1,45 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
)
// convertRegisteredModelResource normalizes the dynamic value of a bundle-side
// registered model against the Terraform resource schema. Normalization
// diagnostics are logged at debug level and do not fail the conversion.
func convertRegisteredModelResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	normalized, diags := convert.Normalize(schema.ResourceRegisteredModel{}, vin)
	for _, d := range diags {
		log.Debugf(ctx, "registered model normalization diagnostic: %s", d.Summary)
	}
	return normalized, nil
}
// registeredModelConverter converts a bundle-side registered model into its
// Terraform resource representation.
type registeredModelConverter struct{}

// Convert normalizes the input value, stores the resulting registered model
// under the given key, and emits a grants resource when grants are configured.
func (registeredModelConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	converted, err := convertRegisteredModelResource(ctx, vin)
	if err != nil {
		return err
	}

	// Store the converted resource in the output.
	out.RegisteredModel[key] = converted.AsAny()

	// Emit a grants resource only if the input carries grants.
	grants := convertGrantsResource(ctx, vin)
	if grants == nil {
		return nil
	}
	grants.Function = fmt.Sprintf("${databricks_registered_model.%s.id}", key)
	out.Grants["registered_model_"+key] = grants
	return nil
}
func init() {
	// Register this converter for the "registered_models" resource key in
	// the bundle configuration.
	registerConverter("registered_models", registeredModelConverter{})
}

View File

@ -0,0 +1,58 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertRegisteredModel verifies that a bundle-side registered model is
// converted into the corresponding Terraform resource and that its grants are
// emitted as a grants resource referencing the model.
func TestConvertRegisteredModel(t *testing.T) {
	model := resources.RegisteredModel{
		CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
			Name:        "name",
			CatalogName: "catalog",
			SchemaName:  "schema",
			Comment:     "comment",
		},
		Grants: []resources.Grant{
			{
				Privileges: []string{"EXECUTE"},
				Principal:  "jane@doe.com",
			},
		},
	}

	// Convert the typed resource into its dynamic representation.
	input, err := convert.FromTyped(model, dyn.NilValue)
	require.NoError(t, err)

	// Run the converter against a fresh set of output resources.
	ctx := context.Background()
	out := schema.NewResources()
	err = registeredModelConverter{}.Convert(ctx, "my_registered_model", input, out)
	require.NoError(t, err)

	// The registered model must be present under its key.
	assert.Equal(t, map[string]any{
		"name":         "name",
		"catalog_name": "catalog",
		"schema_name":  "schema",
		"comment":      "comment",
	}, out.RegisteredModel["my_registered_model"])

	// The grants resource must reference the model by interpolation.
	assert.Equal(t, &schema.ResourceGrants{
		Function: "${databricks_registered_model.my_registered_model.id}",
		Grant: []schema.ResourceGrantsGrant{
			{
				Privileges: []string{"EXECUTE"},
				Principal:  "jane@doe.com",
			},
		},
	}, out.Grants["registered_model_my_registered_model"])
}

View File

@ -0,0 +1,46 @@
package tfdyn
import (
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge"
)
// renameKeys renames keys in the given map value.
//
// Terraform resources sometimes use singular names for repeating blocks where the API
// definition uses the plural name. This function can convert between the two.
// renameKeys renames keys in the given map value.
//
// Terraform resources sometimes use singular names for repeating blocks where the API
// definition uses the plural name. This function can convert between the two.
//
// Values found under an old key are accumulated under the new key and merged
// back into the walked value (with the old keys dropped) before returning.
func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) {
	var err error
	var acc = dyn.V(map[string]dyn.Value{})

	nv, err := dyn.Walk(v, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
		if len(p) == 0 {
			return v, nil
		}

		// Check if this key should be renamed.
		for oldKey, newKey := range rename {
			if p[0].Key() != oldKey {
				continue
			}

			// Copy the path before modifying it; the slice backing `p` is
			// owned by the walker and must not be mutated in place.
			np := make(dyn.Path, len(p))
			copy(np, p)
			np[0] = dyn.Key(newKey)

			// Add the new key to the accumulator.
			acc, err = dyn.SetByPath(acc, np, v)
			if err != nil {
				// Use dyn.InvalidValue for consistency with the other error paths.
				return dyn.InvalidValue, err
			}
			return dyn.InvalidValue, dyn.ErrDrop
		}

		// Pass through all other values.
		return v, dyn.ErrSkip
	})
	if err != nil {
		return dyn.InvalidValue, err
	}

	// Merge the accumulator with the original value.
	return merge.Merge(nv, acc)
}

View File

@ -7,6 +7,8 @@ import (
"path/filepath"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
)
type write struct{}
@ -21,7 +23,15 @@ func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error {
return err
}
root := BundleToTerraform(&b.Config)
var root *schema.Root
err = b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
root, err = BundleToTerraformWithDynValue(ctx, v)
return v, err
})
if err != nil {
return err
}
f, err := os.Create(filepath.Join(dir, "bundle.tf.json"))
if err != nil {
return err

View File

@ -10,6 +10,14 @@ type pathComponent struct {
index int
}
// Key returns the map key for this path component.
// It is empty for components that index into a sequence (see isKey).
func (c pathComponent) Key() string {
	return c.key
}
// Index returns the sequence index for this path component.
// NOTE(review): presumably only meaningful when isKey() is false — confirm at call sites.
func (c pathComponent) Index() int {
	return c.index
}
// isKey reports whether this component addresses a map key
// (i.e. it has a non-empty key) rather than a sequence index.
func (c pathComponent) isKey() bool {
	return c.key != ""
}