package terraform

import (
	"encoding/json"
	"fmt"

	"github.com/databricks/bricks/bundle/config"
	"github.com/databricks/bricks/bundle/config/resources"
	"github.com/databricks/bricks/bundle/internal/tf/schema"
	tfjson "github.com/hashicorp/terraform-json"
)

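// conv copies a value from one type to another by round-tripping it through
// JSON: fields carry over wherever the JSON struct tags of both types line
// up. Marshal and unmarshal errors are ignored, so the copy is best-effort
// (see the NOTE on BundleToTerraform below).
//
// Typical use, as in the conversions below:
//
//	var dst schema.ResourceJob
//	conv(src, &dst)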
func conv(from any, to any) {
	buf, _ := json.Marshal(from)
	json.Unmarshal(buf, &to)
}

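// convPermissions converts the permission entries of a bundle resource into
// a databricks_permissions resource. It returns nil when no permissions are
// configured, in which case callers skip emitting a permissions resource.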
func convPermissions(acl []resources.Permission) *schema.ResourcePermissions {
	if len(acl) == 0 {
		return nil
	}

	resource := schema.ResourcePermissions{}
	for _, ac := range acl {
		resource.AccessControl = append(resource.AccessControl, convPermission(ac))
	}

	return &resource
}

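// convPermission converts a single permission entry into an access_control
// block. Whichever of user name, group name, or service principal name is
// set on the entry is carried over alongside the permission level.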
func convPermission(ac resources.Permission) schema.ResourcePermissionsAccessControl {
	dst := schema.ResourcePermissionsAccessControl{
		PermissionLevel: ac.Level,
	}
	if ac.UserName != "" {
		dst.UserName = ac.UserName
	}
	if ac.GroupName != "" {
		dst.GroupName = ac.GroupName
	}
	if ac.ServicePrincipalName != "" {
		dst.ServicePrincipalName = ac.ServicePrincipalName
	}
	return dst
}

// BundleToTerraform converts resources in a bundle configuration
// to the equivalent Terraform JSON representation.
//
// NOTE: THIS IS CURRENTLY A HACK. WE NEED A BETTER WAY TO
// CONVERT TO/FROM TERRAFORM COMPATIBLE FORMAT.
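//
// As an illustration (the resource key is hypothetical): a job defined under
// the key "my_job" becomes resource.databricks_job.my_job in the generated
// root, and its permissions, if any, become
// resource.databricks_permissions.job_my_job with job_id set to
// "${databricks_job.my_job.id}".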
func BundleToTerraform(config *config.Root) *schema.Root {
	tfroot := schema.NewRoot()
	tfroot.Provider = schema.NewProviders()
	tfroot.Provider.Databricks.Profile = config.Workspace.Profile
	tfroot.Resource = schema.NewResources()

	for k, src := range config.Resources.Jobs {
		var dst schema.ResourceJob
		conv(src, &dst)

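		// The Terraform schema models tasks, libraries, and job clusters as
		// repeated singular blocks (Task, Library, JobCluster below), while
		// the job settings use plural collections, so these nested fields are
		// converted explicitly instead of relying solely on the JSON
		// round-trip in conv above.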
		if src.JobSettings != nil {
			for _, v := range src.Tasks {
				var t schema.ResourceJobTask
				conv(v, &t)

				for _, v_ := range v.Libraries {
					var l schema.ResourceJobTaskLibrary
					conv(v_, &l)
					t.Library = append(t.Library, l)
				}

				dst.Task = append(dst.Task, t)
			}

			for _, v := range src.JobClusters {
				var t schema.ResourceJobJobCluster
				conv(v, &t)
				dst.JobCluster = append(dst.JobCluster, t)
			}

			// Unblock downstream work. To be addressed more generally later.
			if git := src.GitSource; git != nil {
				dst.GitSource = &schema.ResourceJobGitSource{
					Url:      git.GitUrl,
					Branch:   git.GitBranch,
					Commit:   git.GitCommit,
					Provider: string(git.GitProvider),
					Tag:      git.GitTag,
				}
			}
		}

		tfroot.Resource.Job[k] = &dst

		// Configure permissions for this resource.
		if rp := convPermissions(src.Permissions); rp != nil {
			rp.JobId = fmt.Sprintf("${databricks_job.%s.id}", k)
			tfroot.Resource.Permissions["job_"+k] = rp
		}
	}

	for k, src := range config.Resources.Pipelines {
		var dst schema.ResourcePipeline
		conv(src, &dst)

		if src.PipelineSpec != nil {
			for _, v := range src.Libraries {
				var l schema.ResourcePipelineLibrary
				conv(v, &l)
				dst.Library = append(dst.Library, l)
			}

			for _, v := range src.Clusters {
				var l schema.ResourcePipelineCluster
				conv(v, &l)
				dst.Cluster = append(dst.Cluster, l)
			}
		}

		tfroot.Resource.Pipeline[k] = &dst

		// Configure permissions for this resource.
		if rp := convPermissions(src.Permissions); rp != nil {
			rp.PipelineId = fmt.Sprintf("${databricks_pipeline.%s.id}", k)
			tfroot.Resource.Permissions["pipeline_"+k] = rp
		}
	}

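	// Note that model permissions below are attached through the model's
	// registered_model_id attribute rather than its Terraform resource id.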
	for k, src := range config.Resources.Models {
		var dst schema.ResourceMlflowModel
		conv(src, &dst)
		tfroot.Resource.MlflowModel[k] = &dst

		// Configure permissions for this resource.
		if rp := convPermissions(src.Permissions); rp != nil {
			rp.RegisteredModelId = fmt.Sprintf("${databricks_mlflow_model.%s.registered_model_id}", k)
			tfroot.Resource.Permissions["mlflow_model_"+k] = rp
		}
	}

	for k, src := range config.Resources.Experiments {
		var dst schema.ResourceMlflowExperiment
		conv(src, &dst)
		tfroot.Resource.MlflowExperiment[k] = &dst

		// Configure permissions for this resource.
		if rp := convPermissions(src.Permissions); rp != nil {
			rp.ExperimentId = fmt.Sprintf("${databricks_mlflow_experiment.%s.id}", k)
			tfroot.Resource.Permissions["mlflow_experiment_"+k] = rp
		}
	}

	return tfroot
}

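// TerraformToBundle reads resources from Terraform state and copies their
// attributes back into the bundle configuration, keyed by resource name.
// This makes values computed at deploy time (such as resource IDs) available
// to the bundle. It returns an error when the state contains a managed
// resource type it does not know how to map.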
func TerraformToBundle(state *tfjson.State, config *config.Root) error {
	if state.Values == nil {
		return fmt.Errorf("state.Values not set")
	}

	if state.Values.RootModule == nil {
		return fmt.Errorf("state.Values.RootModule not set")
	}

	for _, resource := range state.Values.RootModule.Resources {
		// Limit to managed resources; skip data sources.
		if resource.Mode != tfjson.ManagedResourceMode {
			continue
		}

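		// Each case converts the raw state attributes into the typed
		// Terraform schema struct for the resource, then merges that struct
		// into the bundle resource registered under the same name.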
		switch resource.Type {
		case "databricks_job":
			var tmp schema.ResourceJob
			conv(resource.AttributeValues, &tmp)
			cur := config.Resources.Jobs[resource.Name]
			conv(tmp, &cur)
			config.Resources.Jobs[resource.Name] = cur
		case "databricks_pipeline":
			var tmp schema.ResourcePipeline
			conv(resource.AttributeValues, &tmp)
			cur := config.Resources.Pipelines[resource.Name]
			conv(tmp, &cur)
			config.Resources.Pipelines[resource.Name] = cur
		case "databricks_mlflow_model":
			var tmp schema.ResourceMlflowModel
			conv(resource.AttributeValues, &tmp)
			cur := config.Resources.Models[resource.Name]
			conv(tmp, &cur)
			config.Resources.Models[resource.Name] = cur
		case "databricks_mlflow_experiment":
			var tmp schema.ResourceMlflowExperiment
			conv(resource.AttributeValues, &tmp)
			cur := config.Resources.Experiments[resource.Name]
			conv(tmp, &cur)
			config.Resources.Experiments[resource.Name] = cur
		default:
			return fmt.Errorf("missing mapping for %s", resource.Type)
		}
	}

	return nil
}