Mutators to work with Terraform (#124)

This includes 3 mutators:
* Interpolate resources references to TF compatible format
* Convert resources struct to TF JSON format and write it to disk
* Run TF apply
This commit is contained in:
Pieter Noordhuis 2022-12-09 08:57:30 +01:00 committed by GitHub
parent ff89c9d06f
commit 4f668fc58b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 231 additions and 0 deletions

View File

@ -0,0 +1,51 @@
package terraform
import (
"context"
"fmt"
"os/exec"
"github.com/databricks/bricks/bundle"
"github.com/hashicorp/terraform-exec/tfexec"
)
// apply is a [bundle.Mutator] that runs `terraform apply` for a bundle.
type apply struct{}

// Name returns the name of this mutator.
func (w *apply) Name() string {
	return "terraform.Apply"
}
// Apply locates the terraform binary, runs `terraform init` (with provider
// upgrade enabled) and then `terraform apply` in the bundle's ephemeral
// Terraform working directory. It returns no follow-up mutators.
func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) ([]bundle.Mutator, error) {
	workingDir, err := Dir(b)
	if err != nil {
		return nil, err
	}

	// The terraform binary must be discoverable on $PATH.
	execPath, err := exec.LookPath("terraform")
	if err != nil {
		return nil, fmt.Errorf("locating terraform binary: %w", err)
	}

	tf, err := tfexec.NewTerraform(workingDir, execPath)
	if err != nil {
		return nil, fmt.Errorf("initializing terraform-exec: %w", err)
	}

	err = tf.Init(ctx, tfexec.Upgrade(true))
	if err != nil {
		return nil, fmt.Errorf("terraform init: %w", err)
	}

	err = tf.Apply(ctx)
	if err != nil {
		return nil, fmt.Errorf("terraform apply: %w", err)
	}

	return nil, nil
}
// Apply returns a [bundle.Mutator] that runs the equivalent of `terraform apply`
// from the bundle's ephemeral working directory for Terraform.
// It requires a `terraform` binary to be available on $PATH.
func Apply() bundle.Mutator {
	return &apply{}
}

View File

@ -0,0 +1,77 @@
package terraform
import (
"encoding/json"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/internal/tf/schema"
)
// conv converts `from` into `to` by round-tripping through encoding/json:
// `from` is marshaled to JSON and the result is unmarshaled into `to`,
// which must be a non-nil pointer.
//
// Errors are deliberately ignored: this is a best-effort, stopgap conversion
// (see the NOTE on the caller); values that do not survive the round trip
// are silently dropped.
func conv(from any, to any) {
	buf, err := json.Marshal(from)
	if err != nil {
		return
	}
	// `to` is already a pointer, so pass it to Unmarshal directly;
	// taking its address would only add a level of indirection.
	json.Unmarshal(buf, to)
}
// BundleToTerraform converts resources in a bundle configuration
// to the equivalent Terraform JSON representation.
//
// The parameter is named `cfg` (not `config`) to avoid shadowing the
// imported `config` package referenced in this signature.
//
// NOTE: THIS IS CURRENTLY A HACK. WE NEED A BETTER WAY TO
// CONVERT TO/FROM TERRAFORM COMPATIBLE FORMAT.
func BundleToTerraform(cfg *config.Root) *schema.Root {
	tfroot := schema.NewRoot()
	tfroot.Provider.Databricks.Profile = cfg.Workspace.Profile

	for k, src := range cfg.Resources.Jobs {
		var dst schema.ResourceJob
		conv(src, &dst)

		// Tasks and job clusters are repeated blocks in the Terraform
		// schema and are converted element by element.
		for _, v := range src.Tasks {
			var t schema.ResourceJobTask
			conv(v, &t)
			dst.Task = append(dst.Task, t)
		}

		for _, v := range src.JobClusters {
			var t schema.ResourceJobJobCluster
			conv(v, &t)
			dst.JobCluster = append(dst.JobCluster, t)
		}

		// Unblock downstream work. To be addressed more generally later.
		if git := src.GitSource; git != nil {
			dst.GitSource = &schema.ResourceJobGitSource{
				Url:      git.GitUrl,
				Branch:   git.GitBranch,
				Commit:   git.GitCommit,
				Provider: string(git.GitProvider),
				Tag:      git.GitTag,
			}
		}

		tfroot.Resource.Job[k] = &dst
	}

	for k, src := range cfg.Resources.Pipelines {
		var dst schema.ResourcePipeline
		conv(src, &dst)

		for _, v := range src.Libraries {
			var l schema.ResourcePipelineLibrary
			conv(v, &l)
			dst.Library = append(dst.Library, l)
		}

		for _, v := range src.Clusters {
			var l schema.ResourcePipelineCluster
			conv(v, &l)
			dst.Cluster = append(dst.Cluster, l)
		}

		tfroot.Resource.Pipeline[k] = &dst
	}

	// Clear data sources because we don't have any.
	tfroot.Data = nil
	return tfroot
}

View File

@ -0,0 +1,25 @@
package terraform
import (
"os"
"path/filepath"
"github.com/databricks/bricks/bundle"
)
// Dir returns the Terraform working directory for a given bundle.
// The working directory is ephemeral and nested under the bundle's cache directory.
// It is created if it does not yet exist.
func Dir(b *bundle.Bundle) (string, error) {
	path, err := b.CacheDir()
	if err != nil {
		return "", err
	}

	nest := filepath.Join(path, "terraform")
	// 0700: restrict to the current user; the directory is presumably
	// expected to hold Terraform state/config with sensitive values.
	err = os.MkdirAll(nest, 0700)
	if err != nil {
		return "", err
	}

	return nest, nil
}

View File

@ -0,0 +1,32 @@
package terraform
import (
"fmt"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/interpolation"
)
// interpolateTerraformResourceIdentifiers rewrites variable references to
// resources into Terraform compatible format, e.g.
// "resources.jobs.my_job.id" -> "${databricks_job.my_job.id}".
// Paths that do not reference a resource fall through to the default lookup.
//
// Unknown resource types yield an error instead of panicking; a bare
// "resources" path (no second component) also falls through to the
// default lookup rather than indexing out of range.
func interpolateTerraformResourceIdentifiers(path string, lookup map[string]string) (string, error) {
	parts := strings.Split(path, interpolation.Delimiter)
	if len(parts) >= 2 && parts[0] == "resources" {
		var resourceType string
		switch parts[1] {
		case "pipelines":
			resourceType = "databricks_pipeline"
		case "jobs":
			resourceType = "databricks_job"
		default:
			return "", fmt.Errorf("interpolation for resource type %q is not implemented", parts[1])
		}
		path = strings.Join(append([]string{resourceType}, parts[2:]...), interpolation.Delimiter)
		return fmt.Sprintf("${%s}", path), nil
	}
	return interpolation.DefaultLookup(path, lookup)
}
// Interpolate returns a [bundle.Mutator] that rewrites variable references
// to resources into Terraform compatible format.
func Interpolate() bundle.Mutator {
	return interpolation.Interpolate(interpolateTerraformResourceIdentifiers)
}

View File

@ -0,0 +1,46 @@
package terraform
import (
"context"
"encoding/json"
"os"
"path/filepath"
"github.com/databricks/bricks/bundle"
)
// write is a [bundle.Mutator] that writes the bundle's resources to disk
// in Terraform JSON format.
type write struct{}

// Name returns the name of this mutator.
func (w *write) Name() string {
	return "terraform.Write"
}
// Apply converts the bundle's resources to their Terraform representation
// and writes the result to "bundle.tf.json" in the bundle's ephemeral
// Terraform working directory. It returns no follow-up mutators.
func (w *write) Apply(ctx context.Context, b *bundle.Bundle) ([]bundle.Mutator, error) {
	dir, err := Dir(b)
	if err != nil {
		return nil, err
	}

	root := BundleToTerraform(&b.Config)

	f, err := os.Create(filepath.Join(dir, "bundle.tf.json"))
	if err != nil {
		return nil, err
	}

	enc := json.NewEncoder(f)
	enc.SetIndent("", " ")
	err = enc.Encode(root)

	// Close can surface write failures; report it unless an encoding
	// error already occurred (don't mask the earlier error).
	if cerr := f.Close(); err == nil {
		err = cerr
	}
	if err != nil {
		return nil, err
	}

	return nil, nil
}
// Write returns a [bundle.Mutator] that converts resources in a bundle configuration
// to the equivalent Terraform JSON representation and writes the result to a file
// ("bundle.tf.json" in the bundle's Terraform working directory).
func Write() bundle.Mutator {
	return &write{}
}