Add minimal test for conversion to TF JSON format (#130)

This commit is contained in:
Pieter Noordhuis 2022-12-12 11:31:28 +01:00 committed by GitHub
parent 94a86972e5
commit 8640696b4b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 47 additions and 3 deletions

View File

@@ -19,7 +19,9 @@ func conv(from any, to any) {
// CONVERT TO/FROM TERRAFORM COMPATIBLE FORMAT.
func BundleToTerraform(config *config.Root) *schema.Root {
tfroot := schema.NewRoot()
tfroot.Provider = schema.NewProviders()
tfroot.Provider.Databricks.Profile = config.Workspace.Profile
tfroot.Resource = schema.NewResources()
for k, src := range config.Resources.Jobs {
var dst schema.ResourceJob
@@ -70,8 +72,5 @@ func BundleToTerraform(config *config.Root) *schema.Root {
tfroot.Resource.Pipeline[k] = &dst
}
// Clear data sources because we don't have any.
tfroot.Data = nil
return tfroot
}

View File

@@ -0,0 +1,45 @@
package terraform
import (
"testing"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/clusters"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/assert"
)
// TestConvertJob verifies that a bundle job resource is converted into
// the Terraform-compatible schema: the job name, job cluster list, and
// git source URL carry over, and the data sources section is cleared.
func TestConvertJob(t *testing.T) {
	// Input job as it would appear in the bundle configuration.
	job := resources.Job{
		JobSettings: &jobs.JobSettings{
			Name: "my job",
			JobClusters: []jobs.JobCluster{
				{
					JobClusterKey: "key",
					NewCluster: &clusters.CreateCluster{
						SparkVersion: "10.4.x-scala2.12",
					},
				},
			},
			GitSource: &jobs.GitSource{
				GitProvider: jobs.GitSourceGitProviderGithub,
				GitUrl:      "https://github.com/foo/bar",
			},
		},
	}

	// Wrap the job in a minimal bundle configuration root.
	cfg := config.Root{
		Resources: config.Resources{
			Jobs: map[string]resources.Job{
				"my_job": job,
			},
		},
	}

	out := BundleToTerraform(&cfg)

	// The converted job must retain its settings under the same key.
	converted := out.Resource.Job["my_job"]
	assert.Equal(t, "my job", converted.Name)
	assert.Len(t, converted.JobCluster, 1)
	assert.Equal(t, "https://github.com/foo/bar", converted.GitSource.Url)
	// BundleToTerraform clears data sources; confirm none leak through.
	assert.Nil(t, out.Data)
}