local and remote `.tfstate` utilities

Serge Smertin 2022-05-21 15:23:37 +02:00
parent 3907dcdba9
commit 06db8376bc
3 changed files with 120 additions and 2 deletions
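For orientation, here is a minimal sketch of how the new state utilities are meant to fit together; it is not part of the diff, and the deploy wrapper plus its error handling are assumed scaffolding around the `TerraformDeployer` introduced below:

// Hypothetical wiring of the utilities added in this commit: Init, downloadTfstate
// and uploadTfstate come from the diff below, everything else is assumed.
func deployWithRemoteState(ctx context.Context) error {
	d := &TerraformDeployer{WorkDir: "."}
	if err := d.Init(ctx); err != nil {
		return err
	}
	// pull the last known terraform.tfstate from DBFS before planning/applying
	if err := d.downloadTfstate(ctx); err != nil {
		return err
	}
	// ... run terraform plan/apply through d.tf here ...
	// push the refreshed state back to the per-project (and per-user) location
	return d.uploadTfstate(ctx)
}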


@@ -71,6 +71,17 @@ func (i *inner) Me() *scim.User {
	return &me
}
// DeploymentIsolationPrefix returns the prefix under which per-deployment
// artifacts are stored: the project name alone, or "<project>/<user>" when
// soft isolation is enabled.
func (i *inner) DeploymentIsolationPrefix() string {
	if i.project.Isolation == None {
		return i.project.Name
	}
	if i.project.Isolation == Soft {
		me := i.Me()
		return fmt.Sprintf("%s/%s", i.project.Name, me.UserName)
	}
	panic(fmt.Errorf("unknown project isolation: %s", i.project.Isolation))
}
func (i *inner) DevelopmentCluster(ctx context.Context) (cluster clusters.ClusterInfo, err error) {
	api := clusters.NewClustersAPI(ctx, i.Client()) // TODO: rewrite with normal SDK
	if i.project.DevCluster == nil {
@@ -82,8 +93,7 @@ func (i *inner) DevelopmentCluster(ctx context.Context) (cluster clusters.Cluste
err = fmt.Errorf("projects with soft isolation cannot have named clusters") err = fmt.Errorf("projects with soft isolation cannot have named clusters")
return return
} }
-		me := i.Me()
-		dc.ClusterName = fmt.Sprintf("dev/%s/%s", i.project.Name, me.UserName)
+		dc.ClusterName = fmt.Sprintf("dev/%s", i.DeploymentIsolationPrefix())
	}
	if dc.ClusterName == "" {
		err = fmt.Errorf("please either pick `isolation: soft` or specify a shared cluster name")


@@ -39,14 +39,119 @@ Let's see how far we can get without GRPC magic.
package terraform
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"github.com/databricks/bricks/project"
	"github.com/databrickslabs/terraform-provider-databricks/storage"
	"github.com/hashicorp/go-version"
	"github.com/hashicorp/hc-install/product"
	"github.com/hashicorp/hc-install/releases"
	"github.com/hashicorp/terraform-exec/tfexec"
	tfjson "github.com/hashicorp/terraform-json"
)
// DeploymentStateRemoteLocation is the DBFS folder that stores one
// terraform.tfstate per deployment isolation prefix.
const DeploymentStateRemoteLocation = "dbfs:/FileStore/deployment-state"
// TerraformDeployer wraps terraform-exec for a single working directory and
// keeps the local .tfstate in WorkDir in sync with its remote copy on DBFS.
type TerraformDeployer struct {
	WorkDir string
	CopyTfs bool
	tf      *tfexec.Terraform
}
func (d *TerraformDeployer) Init(ctx context.Context) error {
	if d.CopyTfs {
		panic("copying tf configuration files to a temporary dir not yet implemented")
	}
	// TODO: most likely merge the methods
	exec, err := newTerraform(ctx, d.WorkDir, map[string]string{})
	if err != nil {
		return err
	}
	d.tf = exec
	return nil
}
// remoteTfstateLoc returns the DBFS location of this deployment's state file,
// e.g. dbfs:/FileStore/deployment-state/<project>/<user>/terraform.tfstate
// for projects with soft isolation.
func (d *TerraformDeployer) remoteTfstateLoc() string {
	prefix := project.Current.DeploymentIsolationPrefix()
	return fmt.Sprintf("%s/%s/terraform.tfstate", DeploymentStateRemoteLocation, prefix)
}
func (d *TerraformDeployer) remoteState(ctx context.Context) (*tfjson.State, error) {
	dbfs := storage.NewDbfsAPI(ctx, project.Current.Client())
	raw, err := dbfs.Read(d.remoteTfstateLoc())
	if err != nil {
		return nil, err
	}
	return d.tfstateFromReader(bytes.NewBuffer(raw))
}
func (d *TerraformDeployer) openLocalState() (*os.File, error) {
	return os.Open(fmt.Sprintf("%s/terraform.tfstate", d.WorkDir))
}
func (d *TerraformDeployer) localState() (*tfjson.State, error) {
	f, err := d.openLocalState()
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return d.tfstateFromReader(f)
}
func (d *TerraformDeployer) tfstateFromReader(reader io.Reader) (*tfjson.State, error) {
	var state tfjson.State
	state.UseJSONNumber(true)
	decoder := json.NewDecoder(reader)
	decoder.UseNumber()
	err := decoder.Decode(&state)
	if err != nil {
		return nil, err
	}
	err = state.Validate()
	if err != nil {
		return nil, err
	}
	return &state, nil
}
func (d *TerraformDeployer) uploadTfstate(ctx context.Context) error {
	// scripts/azcli-integration/terraform.tfstate
	dbfs := storage.NewDbfsAPI(ctx, project.Current.Client())
	f, err := d.openLocalState()
	if err != nil {
		return err
	}
	defer f.Close()
	raw, err := io.ReadAll(f)
	if err != nil {
		return err
	}
	// TODO: make sure that deployment locks are implemented
	return dbfs.Create(d.remoteTfstateLoc(), raw, true)
}
func (d *TerraformDeployer) downloadTfstate(ctx context.Context) error {
	remote, err := d.remoteState(ctx)
	if err != nil {
		return err
	}
	raw, err := json.Marshal(remote)
	if err != nil {
		return err
	}
	// overwrite the local state file with the remote copy
	local, err := os.Create(fmt.Sprintf("%s/terraform.tfstate", d.WorkDir))
	if err != nil {
		return err
	}
	defer local.Close()
	_, err = io.Copy(local, bytes.NewBuffer(raw))
	return err
}
// installs terraform to a temporary directory (for now)
func installTerraform(ctx context.Context) (string, error) {
	// TODO: let configuration and/or environment variable specify


@@ -36,6 +36,9 @@ func TestSomething(t *testing.T) {
		if r.Type != "databricks_job" {
			continue
		}
		// TODO: validate that the libraries on jobs defined in *.tf and the libraries
		// in `install_requires` defined in setup.py are the same. Exit with an
		// explanatory error otherwise (see the sketch after this hunk).
		found = true
		// resource "databricks_job" "this"
		assert.Equal(t, "this", r.Name)
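
The TODO above asks for a check that job libraries declared in *.tf match `install_requires` in setup.py. A hypothetical helper for the comparison step is sketched below; it assumes both sides have already been collected into string slices, and parsing setup.py or the terraform resource is out of scope here:

// compareLibraries is a hypothetical helper: it reports which libraries are
// declared on the terraform job resource but missing from setup.py's
// install_requires, and vice versa.
func compareLibraries(tfLibraries, installRequires []string) (missingInSetupPy, missingInTf []string) {
	inTf := map[string]bool{}
	for _, l := range tfLibraries {
		inTf[l] = true
	}
	inSetup := map[string]bool{}
	for _, l := range installRequires {
		inSetup[l] = true
	}
	for _, l := range tfLibraries {
		if !inSetup[l] {
			missingInSetupPy = append(missingInSetupPy, l)
		}
	}
	for _, l := range installRequires {
		if !inTf[l] {
			missingInTf = append(missingInTf, l)
		}
	}
	return missingInSetupPy, missingInTf
}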