databricks-cli/bundle/deploy/terraform/tfdyn/convert_job_test.go

package tfdyn

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/internal/tf/schema"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)
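
// TestConvertJob verifies that a typed job resource is converted into the
// expected Terraform job resource, and that its permissions are emitted as
// a separate permissions resource.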
func TestConvertJob(t *testing.T) {
	var src = resources.Job{
		JobSettings: &jobs.JobSettings{
			Name: "my job",
			JobClusters: []jobs.JobCluster{
				{
					JobClusterKey: "key",
					NewCluster: compute.ClusterSpec{
						SparkVersion: "10.4.x-scala2.12",
					},
				},
			},
			GitSource: &jobs.GitSource{
				GitProvider: jobs.GitProviderGitHub,
				GitUrl:      "https://github.com/foo/bar",
			},
			Parameters: []jobs.JobParameterDefinition{
				{
					Name:    "param1",
					Default: "default1",
				},
				{
					Name:    "param2",
					Default: "default2",
				},
			},
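			// Tasks are listed out of order, and one task has no task key at
			// all; the assertion below shows the order the converter produces.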
			Tasks: []jobs.Task{
				{
					TaskKey:       "task_key_b",
					JobClusterKey: "job_cluster_key_b",
					Libraries: []compute.Library{
						{
							Pypi: &compute.PythonPyPiLibrary{
								Package: "package",
							},
						},
						{
							Whl: "/path/to/my.whl",
						},
					},
				},
				{
					TaskKey:       "task_key_a",
					JobClusterKey: "job_cluster_key_a",
				},
				{
					TaskKey:       "task_key_c",
					JobClusterKey: "job_cluster_key_c",
				},
				{
					Description: "missing task key 😱",
				},
			},
		},
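		// Permissions attached to the job are expected to surface as a
		// separate permissions resource; see the second assertion below.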
		Permissions: []resources.Permission{
			{
				Level:    "CAN_VIEW",
				UserName: "jane@doe.com",
			},
		},
	}
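
	// Convert the typed job to a dynamic value, then run the converter to
	// populate the Terraform resource definitions.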
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = jobConverter{}.Convert(ctx, "my_job", vin, out)
	require.NoError(t, err)

	// Assert equality on the job
	assert.Equal(t, map[string]any{
		"name": "my job",
		"job_cluster": []any{
			map[string]any{
				"job_cluster_key": "key",
				"new_cluster": map[string]any{
					"spark_version": "10.4.x-scala2.12",
				},
			},
		},
		"git_source": map[string]any{
			"provider": "gitHub",
			"url":      "https://github.com/foo/bar",
		},
		"parameter": []any{
			map[string]any{
				"name":    "param1",
				"default": "default1",
			},
			map[string]any{
				"name":    "param2",
				"default": "default2",
			},
		},
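		// Note: the expected tasks are ordered by task key (empty key
		// first), not in the order they were defined above.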
"task": []any{
map[string]any{
"description": "missing task key 😱",
},
map[string]any{
"task_key": "task_key_a",
"job_cluster_key": "job_cluster_key_a",
},
map[string]any{
"task_key": "task_key_b",
"job_cluster_key": "job_cluster_key_b",
"library": []any{
map[string]any{
"pypi": map[string]any{
"package": "package",
},
},
map[string]any{
"whl": "/path/to/my.whl",
},
},
},
map[string]any{
"task_key": "task_key_c",
"job_cluster_key": "job_cluster_key_c",
},
},
}, out.Job["my_job"])

	// Assert equality on the permissions
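	// The job ID is a Terraform interpolation, so the permissions resource
	// is bound to the job resource generated above.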
	assert.Equal(t, &schema.ResourcePermissions{
		JobId: "${databricks_job.my_job.id}",
		AccessControl: []schema.ResourcePermissionsAccessControl{
			{
				PermissionLevel: "CAN_VIEW",
				UserName:        "jane@doe.com",
			},
		},
	}, out.Permissions["job_my_job"])
}