databricks-cli/internal/bundle/job_metadata_test.go

Persist deployment metadata in WSFS (#845)

## Changes

This PR introduces a metadata struct that stores a subset of bundle configuration that we wish to expose to other Databricks services that integrate with bundles. This metadata is uploaded to `${bundle.workspace.state_path}/metadata.json` in the WSFS destination of the bundle deployment.

Documentation for emitted metadata fields:
* `version`: Version of the metadata file schema.
* `config.bundle.git.branch`: Name of the git branch the bundle was deployed from.
* `config.bundle.git.origin_url`: URL of the git remote "origin".
* `config.bundle.git.bundle_root_path`: Relative path of the bundle root from the root of the git repository. Set to "." if they are the same.
* `config.bundle.git.commit`: SHA-1 hash of the exact commit this bundle was deployed from. Note that the deployment might not exactly match this commit if there were changes that had not been committed to git at deploy time.
* `file_path`: Path in the workspace that bundle files are synced to.
* `resources.jobs.[job-ref].id`: ID of the job.
* `resources.jobs.[job-ref].relative_path`: Relative path, from the bundle root, of the YAML config file where this job was defined.

Example metadata object when the bundle root and git root are the same:
```json
{
  "version": 1,
  "config": {
    "bundle": {
      "lock": {},
      "git": {
        "branch": "master",
        "origin_url": "www.host.com",
        "commit": "7af8e5d3f5dceffff9295d42d21606ccf056dce0",
        "bundle_root_path": "."
      }
    },
    "workspace": {
      "file_path": "/Users/shreyas.goenka@databricks.com/.bundle/pipeline-progress/default/files"
    },
    "resources": {
      "jobs": {
        "bar": {
          "id": "245921165354846",
          "relative_path": "databricks.yml"
        }
      }
    },
    "sync": {}
  }
}
```

Example metadata when the git root is one level above the bundle root:
```json
{
  "version": 1,
  "config": {
    "bundle": {
      "lock": {},
      "git": {
        "branch": "dev-branch",
        "origin_url": "www.my-repo.com",
        "commit": "3db46ef750998952b00a2b3e7991e31787e4b98b",
        "bundle_root_path": "pipeline-progress"
      }
    },
    "workspace": {
      "file_path": "/Users/shreyas.goenka@databricks.com/.bundle/pipeline-progress/default/files"
    },
    "resources": {
      "jobs": {
        "bar": {
          "id": "245921165354846",
          "relative_path": "databricks.yml"
        }
      }
    },
    "sync": {}
  }
}
```

This unblocks integration with the jobs break glass UI for bundles.

## Tests

Unit tests and integration tests.
2023-10-27 12:55:43 +00:00
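
For reference, here is a minimal sketch of the structs this schema could serialize from, inferred from the JSON examples above and the type names used in the test below (`metadata.Metadata`, `metadata.Config`, `config.Git`, and so on). Field sets, tags, and integer widths are assumptions, not the exact definitions in `bundle/metadata`:

```go
// A sketch of the metadata structs, inferred from the JSON examples above
// and the test below. The real definitions live in bundle/metadata (with
// Git coming from bundle/config) and may differ; the empty "lock" and
// "sync" objects are omitted here for brevity.
package metadata

// Version is the current schema version of the metadata file.
const Version = 1

type Metadata struct {
	Version int    `json:"version"`
	Config  Config `json:"config"`
}

type Config struct {
	Bundle    Bundle    `json:"bundle"`
	Workspace Workspace `json:"workspace"`
	Resources Resources `json:"resources"`
}

type Bundle struct {
	Git Git `json:"git"`
}

// Git captures the state of the git checkout at deploy time.
type Git struct {
	Branch         string `json:"branch,omitempty"`
	OriginURL      string `json:"origin_url,omitempty"`
	Commit         string `json:"commit,omitempty"`
	BundleRootPath string `json:"bundle_root_path,omitempty"`
}

type Workspace struct {
	// FilesPath is the workspace path bundle files are synced to.
	FilesPath string `json:"file_path"`
}

type Resources struct {
	Jobs map[string]*Job `json:"jobs"`
}

type Job struct {
	ID           string `json:"id"`
	RelativePath string `json:"relative_path"`
}
```

Modeling the job ID as a string rather than an integer is consistent with the test below, which formats `job1.JobId` with `strconv.FormatInt` before comparing.
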
package bundle

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"path"
	"strconv"
	"testing"

	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/metadata"
	"github.com/databricks/cli/internal"
	"github.com/databricks/cli/libs/filer"
	"github.com/databricks/databricks-sdk-go"
	"github.com/google/uuid"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestAccJobsMetadataFile(t *testing.T) {
	env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
	t.Log(env)

	w, err := databricks.NewWorkspaceClient()
	require.NoError(t, err)

	nodeTypeId := internal.GetNodeTypeId(env)
	uniqueId := uuid.New().String()
	bundleRoot, err := initTestTemplate(t, "job_metadata", map[string]any{
		"unique_id":     uniqueId,
		"node_type_id":  nodeTypeId,
		"spark_version": "13.2.x-snapshot-scala2.12",
	})
	require.NoError(t, err)

	// deploy bundle
	err = deployBundle(t, bundleRoot)
	require.NoError(t, err)

	// Cleanup the deployed bundle
	t.Cleanup(func() {
		err = destroyBundle(t, bundleRoot)
		require.NoError(t, err)
	})

	// assert job 1 is created
	jobName := "test-job-metadata-1-" + uniqueId
	job1, err := w.Jobs.GetBySettingsName(context.Background(), jobName)
	require.NoError(t, err)
	assert.Equal(t, job1.Settings.Name, jobName)

	// assert job 2 is created
	jobName = "test-job-metadata-2-" + uniqueId
	job2, err := w.Jobs.GetBySettingsName(context.Background(), jobName)
	require.NoError(t, err)
	assert.Equal(t, job2.Settings.Name, jobName)

	// Compute root path for the bundle deployment
	me, err := w.CurrentUser.Me(context.Background())
	require.NoError(t, err)
	root := fmt.Sprintf("/Users/%s/.bundle/%s", me.UserName, uniqueId)
	f, err := filer.NewWorkspaceFilesClient(w, root)
	require.NoError(t, err)

	// Read metadata object from the workspace
	r, err := f.Read(context.Background(), "state/metadata.json")
	require.NoError(t, err)
	b, err := io.ReadAll(r)
	require.NoError(t, err)
	actualMetadata := metadata.Metadata{}
	err = json.Unmarshal(b, &actualMetadata)
	require.NoError(t, err)

	// expected value for the metadata
	expectedMetadata := metadata.Metadata{
		Version: metadata.Version,
		Config: metadata.Config{
			Bundle: metadata.Bundle{
				Git: config.Git{
					BundleRootPath: ".",
				},
			},
			Workspace: metadata.Workspace{
				FilesPath: path.Join(root, "files"),
			},
			Resources: metadata.Resources{
				Jobs: map[string]*metadata.Job{
					"foo": {
						ID:           strconv.FormatInt(job1.JobId, 10),
						RelativePath: "databricks.yml",
					},
					"bar": {
						ID:           strconv.FormatInt(job2.JobId, 10),
						RelativePath: "a/b/resources.yml",
					},
				},
			},
		},
	}

	// Assert metadata matches what we expected.
	assert.Equal(t, expectedMetadata, actualMetadata)
}
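
Note: like the other `TestAcc*` integration tests in this package, `internal.GetEnvOrSkipTest` skips this test unless the `CLOUD_ENV` environment variable is set, so it only runs against a real workspace. The `t.Cleanup` hook ensures `destroyBundle` tears the deployment down even if a later assertion fails.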