mirror of https://github.com/databricks/cli.git

commit 727dc793c7 (parent dd9f59837e)

Add integration test for default Python bundle initialization

Changed files: go.mod, go.sum, a new integration test (package bundle_test), its golden output files under testdata/default_python/, and new testcli/testutil helpers.

go.mod
@@ -1,6 +1,6 @@
  module github.com/databricks/cli

- go 1.23
+ go 1.23.0

  toolchain go1.23.4

@@ -8,12 +8,14 @@ require (
  github.com/Masterminds/semver/v3 v3.3.1 // MIT
  github.com/briandowns/spinner v1.23.1 // Apache 2.0
  github.com/databricks/databricks-sdk-go v0.54.0 // Apache 2.0
+ github.com/elliotchance/orderedmap/v3 v3.0.0 // MIT
  github.com/fatih/color v1.18.0 // MIT
  github.com/google/uuid v1.6.0 // BSD-3-Clause
  github.com/hashicorp/go-version v1.7.0 // MPL 2.0
  github.com/hashicorp/hc-install v0.9.0 // MPL 2.0
  github.com/hashicorp/terraform-exec v0.21.0 // MPL 2.0
  github.com/hashicorp/terraform-json v0.23.0 // MPL 2.0
+ github.com/hexops/gotextdiff v1.0.3 // BSD 3-Clause "New" or "Revised" License
  github.com/manifoldco/promptui v0.9.0 // BSD-3-Clause
  github.com/mattn/go-isatty v0.0.20 // MIT
  github.com/nwidger/jsoncolor v0.3.2 // MIT
@@ -22,6 +24,7 @@ require (
  github.com/spf13/cobra v1.8.1 // Apache 2.0
  github.com/spf13/pflag v1.0.5 // BSD-3-Clause
  github.com/stretchr/testify v1.10.0 // MIT
+ github.com/wI2L/jsondiff v0.6.1 // MIT
  golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
  golang.org/x/mod v0.22.0
  golang.org/x/oauth2 v0.24.0
@@ -55,6 +58,10 @@ require (
  github.com/mattn/go-colorable v0.1.13 // indirect
  github.com/pmezard/go-difflib v1.0.0 // indirect
  github.com/stretchr/objx v0.5.2 // indirect
+ github.com/tidwall/gjson v1.18.0 // indirect
+ github.com/tidwall/match v1.1.1 // indirect
+ github.com/tidwall/pretty v1.2.1 // indirect
+ github.com/tidwall/sjson v1.2.5 // indirect
  github.com/zclconf/go-cty v1.15.0 // indirect
  go.opencensus.io v0.24.0 // indirect
  go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect

go.sum
@@ -37,6 +37,8 @@ github.com/databricks/databricks-sdk-go v0.54.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtg
  github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
  github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
  github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+ github.com/elliotchance/orderedmap/v3 v3.0.0 h1:Yay/tDjX+vzza+Drcoo8VEbuBnOYGpgenCXWcpQSFDg=
+ github.com/elliotchance/orderedmap/v3 v3.0.0/go.mod h1:G+Hc2RwaZvJMcS4JpGCOyViCnGeKf0bTYCGTO4uhjSo=
  github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
  github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
  github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -109,6 +111,8 @@ github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVW
  github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg=
  github.com/hashicorp/terraform-json v0.23.0 h1:sniCkExU4iKtTADReHzACkk8fnpQXrdD2xoR+lppBkI=
  github.com/hashicorp/terraform-json v0.23.0/go.mod h1:MHdXbBAbSg0GvzuWazEGKAn/cyNfIB7mN6y7KJN6y2c=
+ github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
+ github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
  github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
  github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
  github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
@@ -156,6 +160,18 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO
  github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
  github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
  github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+ github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
+ github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
+ github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
+ github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
+ github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
+ github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
+ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
+ github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
+ github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
+ github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
+ github.com/wI2L/jsondiff v0.6.1 h1:ISZb9oNWbP64LHnu4AUhsMF5W0FIj5Ok3Krip9Shqpw=
+ github.com/wI2L/jsondiff v0.6.1/go.mod h1:KAEIojdQq66oJiHhDyQez2x+sRit0vIzC9KeK0yizxM=
  github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
  github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
  github.com/zclconf/go-cty v1.15.0 h1:tTCRWxsexYUmtt/wVxgDClUe+uQusuI443uL6e+5sXQ=

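Taken together, the dependency changes line up with the new test helpers further below: elliotchance/orderedmap keeps output replacements in insertion order, hexops/gotextdiff renders unified diffs of golden files, wI2L/jsondiff powers the JSON-aware comparison, and the tidwall modules appear as indirect dependencies pulled in by these. A minimal sketch (not part of this commit) of the insertion-ordered iteration the replacement logic relies on; the sample keys and values are invented:

package main

import (
	"fmt"

	"github.com/elliotchance/orderedmap/v3"
)

func main() {
	// Replacements are applied in insertion order, so a caller can register
	// the most specific value first and know it is substituted before
	// shorter, overlapping values.
	m := orderedmap.NewOrderedMap[string, string]()
	m.Set("first.last@example.com", "$USERNAME") // hypothetical value
	m.Set("first.last", "$USERNAME")             // hypothetical value

	for key, value := range m.AllFromFront() {
		fmt.Println(key, "->", value)
	}
}
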
@@ -0,0 +1,106 @@
(new file, package bundle_test: integration test for the default-python bundle template)

package bundle_test

import (
	"encoding/json"
	"os"
	"path/filepath"
	"testing"

	"github.com/databricks/cli/integration/internal/acc"
	"github.com/databricks/cli/internal/testcli"
	"github.com/databricks/cli/internal/testutil"
	"github.com/databricks/cli/libs/python/pythontest"
	"github.com/stretchr/testify/require"
)

var pythonVersions = []string{
	"3.8",
	"3.9",
	"3.10",
	"3.11",
	"3.12",
	"3.13",
}

var pythonVersionsShort = []string{
	"3.9",
	"3.12",
}

var extraInstalls = map[string][]string{
	"3.12": {"setuptools"},
	"3.13": {"setuptools"},
}

func TestDefaultPython(t *testing.T) {
	versions := pythonVersions
	if testing.Short() {
		versions = pythonVersionsShort
	}

	for _, pythonVersion := range versions {
		t.Run(pythonVersion, func(t *testing.T) {
			testDefaultPython(t, pythonVersion)
		})
	}
}

func testDefaultPython(t *testing.T, pythonVersion string) {
	ctx, wt := acc.WorkspaceTest(t)

	uniqueProjectId := testutil.RandomName("")
	ctx, replacements := testcli.WithReplacementsMap(ctx)
	replacements.Set(uniqueProjectId, "$UNIQUE_PRJ")

	testcli.PrepareReplacements(t, replacements, wt.W)

	user, err := wt.W.CurrentUser.Me(ctx)
	require.NoError(t, err)
	if user != nil {
		testcli.PrepareReplacementsUser(t, replacements, *user)
	}

	tmpDir1 := pythontest.RequirePythonVENV(t, ctx, pythonVersion, true)
	extras, ok := extraInstalls[pythonVersion]
	if ok {
		args := append([]string{"pip", "install"}, extras...)
		testutil.RunCommand(t, "uv", args...)
	}

	projectName := "project_name_" + uniqueProjectId

	initConfig := map[string]string{
		"project_name":     projectName,
		"include_notebook": "yes",
		"include_python":   "yes",
		"include_dlt":      "yes",
	}
	b, err := json.Marshal(initConfig)
	require.NoError(t, err)
	err = os.WriteFile(filepath.Join(tmpDir1, "config.json"), b, 0o644)
	require.NoError(t, err)

	testcli.RequireOutput(t, ctx, []string{"bundle", "init", "default-python", "--config-file", "config.json"}, "testdata/default_python/bundle_init.txt")
	testutil.Chdir(t, projectName)

	testcli.RequireOutput(t, ctx, []string{"bundle", "validate"}, "testdata/default_python/bundle_validate.txt")

	testcli.RequireOutput(t, ctx, []string{"bundle", "deploy"}, "testdata/default_python/bundle_deploy.txt")
	t.Cleanup(func() {
		// Delete the stack
		testcli.RequireSuccessfulRun(t, ctx, "bundle", "destroy", "--auto-approve")
	})

	ignoredFields := []string{
		"/resources/jobs/project_name_$UNIQUE_PRJ_job/email_notifications",
		"/resources/jobs/project_name_$UNIQUE_PRJ_job/job_clusters/0/new_cluster/node_type_id",
		"/resources/jobs/project_name_$UNIQUE_PRJ_job/url",
		"/resources/pipelines/project_name_$UNIQUE_PRJ_pipeline/catalog",
		"/resources/pipelines/project_name_$UNIQUE_PRJ_pipeline/url",
		"/workspace/current_user/externalId",
		"/workspace/current_user/groups",
		"/workspace/current_user/name/familyName",
	}

	testcli.RequireOutputJQ(t, ctx, []string{"bundle", "summary", "--output", "json"}, "testdata/default_python/bundle_summary.txt", ignoredFields)
}

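The golden files that follow depend on the output scrubbing implemented in the new testcli helpers later in this diff: workspace-specific values become named placeholders, while UUIDs, long numbers, and temp paths are collapsed to <UUID>, <NUMID>, and /tmp/.... A standalone sketch of that normalization, reusing the same regular expressions; the sample input line and the "a1b2c3" suffix are invented for illustration:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

var (
	uuidRegex  = regexp.MustCompile(`[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}`)
	numIdRegex = regexp.MustCompile(`[0-9]{3,}`)
)

func main() {
	// Hypothetical raw CLI output captured during a test run.
	out := "Uploading project_name_a1b2c3-0.0.1+1734015019.12-py3-none-any.whl to job 987654321"

	// Named replacements run first (here just the unique project suffix),
	// then the generic scrubbing of UUIDs and long numeric IDs.
	out = strings.ReplaceAll(out, "a1b2c3", "$UNIQUE_PRJ")
	out = uuidRegex.ReplaceAllString(out, "<UUID>")
	out = numIdRegex.ReplaceAllString(out, "<NUMID>")

	fmt.Println(out)
}
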
@@ -0,0 +1,6 @@
(new golden file: testdata/default_python/bundle_deploy.txt)

Building project_name_$UNIQUE_PRJ...
Uploading project_name_$UNIQUE_PRJ-0.0.1+<NUMID>.<NUMID>-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files...
Deploying resources...
Updating deployment state...
Deployment complete!

@@ -0,0 +1,8 @@
(new golden file: testdata/default_python/bundle_init.txt)


Welcome to the default Python template for Databricks Asset Bundles!
Workspace to use (auto-detected, edit in 'project_name_$UNIQUE_PRJ/databricks.yml'): https://$DATABRICKS_HOST

✨ Your new project has been created in the 'project_name_$UNIQUE_PRJ' directory!

Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.

@@ -0,0 +1,185 @@
(new golden file: testdata/default_python/bundle_summary.txt)

{
  "bundle": {
    "name": "project_name_$UNIQUE_PRJ",
    "target": "dev",
    "environment": "dev",
    "terraform": {
      "exec_path": "/tmp/.../terraform"
    },
    "git": {
      "bundle_root_path": ".",
      "inferred": true
    },
    "mode": "development",
    "deployment": {
      "lock": {
        "enabled": false
      }
    }
  },
  "include": [
    "resources/project_name_$UNIQUE_PRJ.job.yml",
    "resources/project_name_$UNIQUE_PRJ.pipeline.yml"
  ],
  "workspace": {
    "host": "https://$DATABRICKS_HOST",
    "current_user": {
      "active": true,
      "displayName": "$USERNAME",
      "emails": [
        {
          "primary": true,
          "type": "work",
          "value": "$USERNAME"
        }
      ],
      "groups": [
        {
          "$ref": "Groups/$USER.Groups[0]",
          "display": "team.engineering",
          "type": "direct",
          "value": "$USER.Groups[0]"
        }
      ],
      "id": "$USER.Id",
      "name": {
        "familyName": "$USERNAME",
        "givenName": "$USERNAME"
      },
      "schemas": [
        "urn:ietf:params:scim:schemas:core:2.0:User",
        "urn:ietf:params:scim:schemas:extension:workspace:2.0:User"
      ],
      "short_name": "$USERNAME",
      "userName": "$USERNAME"
    },
    "root_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev",
    "file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files",
    "resource_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/resources",
    "artifact_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/artifacts",
    "state_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state"
  },
  "resources": {
    "jobs": {
      "project_name_$UNIQUE_PRJ_job": {
        "deployment": {
          "kind": "BUNDLE",
          "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
        },
        "edit_mode": "UI_LOCKED",
        "email_notifications": {
          "on_failure": [
            "$USERNAME"
          ]
        },
        "format": "MULTI_TASK",
        "id": "<NUMID>",
        "job_clusters": [
          {
            "job_cluster_key": "job_cluster",
            "new_cluster": {
              "autoscale": {
                "max_workers": 4,
                "min_workers": 1
              },
              "node_type_id": "i3.xlarge",
              "spark_version": "15.4.x-scala2.12"
            }
          }
        ],
        "max_concurrent_runs": 4,
        "name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_job",
        "queue": {
          "enabled": true
        },
        "tags": {
          "dev": "$USERNAME"
        },
        "tasks": [
          {
            "job_cluster_key": "job_cluster",
            "notebook_task": {
              "notebook_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/notebook"
            },
            "task_key": "notebook_task"
          },
          {
            "depends_on": [
              {
                "task_key": "notebook_task"
              }
            ],
            "pipeline_task": {
              "pipeline_id": "${resources.pipelines.project_name_$UNIQUE_PRJ_pipeline.id}"
            },
            "task_key": "refresh_pipeline"
          },
          {
            "depends_on": [
              {
                "task_key": "refresh_pipeline"
              }
            ],
            "job_cluster_key": "job_cluster",
            "libraries": [
              {
                "whl": "dist/*.whl"
              }
            ],
            "python_wheel_task": {
              "entry_point": "main",
              "package_name": "project_name_$UNIQUE_PRJ"
            },
            "task_key": "main_task"
          }
        ],
        "trigger": {
          "pause_status": "PAUSED",
          "periodic": {
            "interval": 1,
            "unit": "DAYS"
          }
        },
        "url": "https://$DATABRICKS_HOST/jobs/<NUMID>?o=<NUMID>"
      }
    },
    "pipelines": {
      "project_name_$UNIQUE_PRJ_pipeline": {
        "catalog": "main",
        "configuration": {
          "bundle.sourcePath": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src"
        },
        "deployment": {
          "kind": "BUNDLE",
          "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
        },
        "development": true,
        "id": "<UUID>",
        "libraries": [
          {
            "notebook": {
              "path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/dlt_pipeline"
            }
          }
        ],
        "name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_pipeline",
        "target": "project_name_$UNIQUE_PRJ_dev",
        "url": "https://$DATABRICKS_HOST/pipelines/<UUID>?o=<NUMID>"
      }
    }
  },
  "sync": {
    "paths": [
      "."
    ]
  },
  "presets": {
    "name_prefix": "[dev $USERNAME] ",
    "pipelines_development": true,
    "trigger_pause_status": "PAUSED",
    "jobs_max_concurrent_runs": 4,
    "tags": {
      "dev": "$USERNAME"
    }
  }
}

@@ -0,0 +1,8 @@
(new golden file: testdata/default_python/bundle_validate.txt)

Name: project_name_$UNIQUE_PRJ
Target: dev
Workspace:
  Host: https://$DATABRICKS_HOST
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev

Validation OK!

@@ -0,0 +1,266 @@
(new file, package testcli: golden-output comparison helpers and output replacements)

package testcli

import (
	"context"
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"runtime"
	"slices"
	"strings"

	"github.com/databricks/cli/internal/testutil"
	"github.com/databricks/cli/libs/iamutil"
	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/iam"
	"github.com/elliotchance/orderedmap/v3"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/wI2L/jsondiff"
)

func ReadFile(t testutil.TestingT, ctx context.Context, filename string) string {
	data, err := os.ReadFile(filename)
	if os.IsNotExist(err) {
		return ""
	}
	require.NoError(t, err)
	return string(data)
}

func WriteFile(t testutil.TestingT, ctx context.Context, filename, data string) {
	t.Logf("Overwriting %s", filename)
	err := os.WriteFile(filename, []byte(data), 0o644)
	require.NoError(t, err)
}

func captureOutput(t testutil.TestingT, ctx context.Context, args []string) string {
	t.Logf("run args: [%s]", strings.Join(args, ", "))
	r := NewRunner(t, ctx, args...)
	stdout, stderr, err := r.Run()
	require.NoError(t, err)
	out := stderr.String() + stdout.String()
	return ReplaceOutput(t, ctx, out)
}

func assertEqualTexts(t testutil.TestingT, filename1, filename2, expected, out string) {
	if len(out) < 1000 && len(expected) < 1000 {
		// This shows full strings plus a diff, which is useful when debugging newlines.
		assert.Equal(t, expected, out)
	} else {
		// Only show the diff for large texts.
		diff := testutil.Diff(filename1, filename2, expected, out)
		t.Errorf("Diff:\n%s", diff)
	}
}

func logDiff(t testutil.TestingT, filename1, filename2, expected, out string) {
	diff := testutil.Diff(filename1, filename2, expected, out)
	t.Logf("Diff:\n%s", diff)
}

func RequireOutput(t testutil.TestingT, ctx context.Context, args []string, expectedFilename string) {
	_, filename, _, _ := runtime.Caller(1)
	dir := filepath.Dir(filename)
	expectedPath := filepath.Join(dir, expectedFilename)
	expected := ReadFile(t, ctx, expectedPath)

	out := captureOutput(t, ctx, args)

	if out != expected {
		actual := fmt.Sprintf("Output from %v", args)
		assertEqualTexts(t, expectedFilename, actual, expected, out)

		if os.Getenv("TESTS_OUTPUT") == "OVERWRITE" {
			WriteFile(t, ctx, expectedPath, out)
		}
	}
}

func RequireOutputJQ(t testutil.TestingT, ctx context.Context, args []string, expectedFilename string, ignorePaths []string) {
	_, filename, _, _ := runtime.Caller(1)
	dir := filepath.Dir(filename)
	expectedPath := filepath.Join(dir, expectedFilename)
	expected := ReadFile(t, ctx, expectedPath)

	out := captureOutput(t, ctx, args)

	if out != expected {
		patch, err := jsondiff.CompareJSON([]byte(expected), []byte(out))
		actual := fmt.Sprintf("Output from %v", args)
		if err != nil {
			t.Logf("CompareJSON error for %s vs %s: %s (fallback to textual comparison)", args, expectedFilename, err)
			assertEqualTexts(t, expectedFilename, actual, expected, out)
		} else {
			logDiff(t, expectedFilename, actual, expected, out)
			ignoredDiffs := []string{}
			erroredDiffs := []string{}
			for _, op := range patch {
				if matchesPrefixes(ignorePaths, op.Path) {
					ignoredDiffs = append(ignoredDiffs, fmt.Sprintf("%7s %s %v", op.Type, op.Path, op.Value))
				} else {
					erroredDiffs = append(erroredDiffs, fmt.Sprintf("%7s %s %v", op.Type, op.Path, op.Value))
				}
			}
			if len(ignoredDiffs) > 0 {
				t.Logf("Ignored differences between %s and %s:\n ==> %s", expectedFilename, args, strings.Join(ignoredDiffs, "\n ==> "))
			}
			if len(erroredDiffs) > 0 {
				t.Errorf("Unexpected differences between %s and %s:\n ==> %s", expectedFilename, args, strings.Join(erroredDiffs, "\n ==> "))
			}
		}

		if os.Getenv("TESTS_OUTPUT") == "OVERWRITE" {
			WriteFile(t, ctx, filepath.Join(dir, expectedFilename), out)
		}
	}
}

// matchesPrefixes reports whether path equals one of the prefixes or is nested under one of them.
func matchesPrefixes(prefixes []string, path string) bool {
	for _, p := range prefixes {
		if p == path {
			return true
		}
		if strings.HasPrefix(path, p+"/") {
			return true
		}
	}
	return false
}

var (
	uuidRegex        = regexp.MustCompile(`[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}`)
	numIdRegex       = regexp.MustCompile(`[0-9]{3,}`)
	privatePathRegex = regexp.MustCompile(`(/tmp|/private)(/.*)/([a-zA-Z0-9]+)`)
)

func ReplaceOutput(t testutil.TestingT, ctx context.Context, out string) string {
	out = NormalizeNewlines(out)
	replacements := GetReplacementsMap(ctx)
	if replacements == nil {
		t.Fatal("WithReplacementsMap was not called")
	}
	for key, value := range replacements.AllFromFront() {
		out = strings.ReplaceAll(out, key, value)
	}

	out = uuidRegex.ReplaceAllString(out, "<UUID>")
	out = numIdRegex.ReplaceAllString(out, "<NUMID>")
	out = privatePathRegex.ReplaceAllString(out, "/tmp/.../$3")

	return out
}

type key int

const (
	replacementsMapKey = key(1)
)

func WithReplacementsMap(ctx context.Context) (context.Context, *orderedmap.OrderedMap[string, string]) {
	value := ctx.Value(replacementsMapKey)
	if value != nil {
		if existingMap, ok := value.(*orderedmap.OrderedMap[string, string]); ok {
			return ctx, existingMap
		}
	}

	newMap := orderedmap.NewOrderedMap[string, string]()
	ctx = context.WithValue(ctx, replacementsMapKey, newMap)
	return ctx, newMap
}

func GetReplacementsMap(ctx context.Context) *orderedmap.OrderedMap[string, string] {
	value := ctx.Value(replacementsMapKey)
	if value != nil {
		if existingMap, ok := value.(*orderedmap.OrderedMap[string, string]); ok {
			return existingMap
		}
	}
	return nil
}

func setKV(replacements *orderedmap.OrderedMap[string, string], key, value string) {
	if key == "" || value == "" {
		return
	}
	replacements.Set(key, value)
}

func PrepareReplacements(t testutil.TestingT, replacements *orderedmap.OrderedMap[string, string], w *databricks.WorkspaceClient) {
	// In some clouds (GCP) w.Config.Host includes the "https://" prefix; in others (Azure) it is just a host name.
	host := strings.TrimPrefix(strings.TrimPrefix(w.Config.Host, "http://"), "https://")
	setKV(replacements, host, "$DATABRICKS_HOST")
	setKV(replacements, w.Config.ClusterID, "$DATABRICKS_CLUSTER_ID")
	setKV(replacements, w.Config.WarehouseID, "$DATABRICKS_WAREHOUSE_ID")
	setKV(replacements, w.Config.ServerlessComputeID, "$DATABRICKS_SERVERLESS_COMPUTE_ID")
	setKV(replacements, w.Config.MetadataServiceURL, "$DATABRICKS_METADATA_SERVICE_URL")
	setKV(replacements, w.Config.AccountID, "$DATABRICKS_ACCOUNT_ID")
	setKV(replacements, w.Config.Token, "$DATABRICKS_TOKEN")
	setKV(replacements, w.Config.Username, "$DATABRICKS_USERNAME")
	setKV(replacements, w.Config.Password, "$DATABRICKS_PASSWORD")
	setKV(replacements, w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
	setKV(replacements, w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE")
	setKV(replacements, w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT")
	setKV(replacements, w.Config.GoogleCredentials, "$GOOGLE_CREDENTIALS")
	setKV(replacements, w.Config.AzureResourceID, "$DATABRICKS_AZURE_RESOURCE_ID")
	setKV(replacements, w.Config.AzureClientSecret, "$ARM_CLIENT_SECRET")
	// setKV(replacements, w.Config.AzureClientID, "$ARM_CLIENT_ID")
	setKV(replacements, w.Config.AzureClientID, "$USERNAME")
	setKV(replacements, w.Config.AzureTenantID, "$ARM_TENANT_ID")
	setKV(replacements, w.Config.ActionsIDTokenRequestURL, "$ACTIONS_ID_TOKEN_REQUEST_URL")
	setKV(replacements, w.Config.ActionsIDTokenRequestToken, "$ACTIONS_ID_TOKEN_REQUEST_TOKEN")
	setKV(replacements, w.Config.AzureEnvironment, "$ARM_ENVIRONMENT")
	setKV(replacements, w.Config.ClientID, "$DATABRICKS_CLIENT_ID")
	setKV(replacements, w.Config.ClientSecret, "$DATABRICKS_CLIENT_SECRET")
	setKV(replacements, w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
	setKV(replacements, w.Config.AuthType, "$DATABRICKS_AUTH_TYPE")
}

func PrepareReplacementsUser(t testutil.TestingT, replacements *orderedmap.OrderedMap[string, string], u iam.User) {
	// There could be exact matches or overlap between different name fields, so sort them by length
	// to ensure we match the largest one first and map them all to the same token.
	names := []string{
		u.DisplayName,
		u.UserName,
		iamutil.GetShortUserName(&u),
	}
	// Only read the structured name when it is set; it may be nil for service principals.
	if u.Name != nil {
		names = append(names, u.Name.FamilyName)
		names = append(names, u.Name.GivenName)
	}
	for _, val := range u.Emails {
		names = append(names, val.Value)
	}
	stableSortReverseLength(names)

	for _, name := range names {
		setKV(replacements, name, "$USERNAME")
	}

	for ind, val := range u.Groups {
		setKV(replacements, val.Value, fmt.Sprintf("$USER.Groups[%d]", ind))
	}

	setKV(replacements, u.Id, "$USER.Id")

	for ind, val := range u.Roles {
		setKV(replacements, val.Value, fmt.Sprintf("$USER.Roles[%d]", ind))
	}

	// Schemas []UserSchema `json:"schemas,omitempty"`
}

func stableSortReverseLength(strs []string) {
	slices.SortStableFunc(strs, func(a, b string) int {
		return len(b) - len(a)
	})
}

func NormalizeNewlines(input string) string {
	output := strings.ReplaceAll(input, "\r\n", "\n")
	return strings.ReplaceAll(output, "\r", "\n")
}

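A small standalone sketch of the JSON-aware comparison performed by RequireOutputJQ, using two tiny documents invented for illustration: jsondiff.CompareJSON yields patch operations whose JSON Pointer paths can then be matched against ignore prefixes, mirroring what matchesPrefixes does above.

package main

import (
	"fmt"
	"strings"

	"github.com/wI2L/jsondiff"
)

func main() {
	expected := []byte(`{"bundle":{"name":"x"},"resources":{"jobs":{"j":{"url":"https://old","format":"MULTI_TASK"}}}}`)
	actual := []byte(`{"bundle":{"name":"x"},"resources":{"jobs":{"j":{"url":"https://new","format":"MULTI_TASK"}}}}`)

	patch, err := jsondiff.CompareJSON(expected, actual)
	if err != nil {
		panic(err)
	}

	// An exact match or a nested child of an ignored path is tolerated;
	// every other difference would fail the test.
	ignorePaths := []string{"/resources/jobs/j/url"}
	for _, op := range patch {
		ignored := false
		for _, p := range ignorePaths {
			if op.Path == p || strings.HasPrefix(op.Path, p+"/") {
				ignored = true
				break
			}
		}
		fmt.Printf("%7s %s %v (ignored=%v)\n", op.Type, op.Path, op.Value, ignored)
	}
}
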
@@ -0,0 +1,20 @@
(new file, package testcli: unit tests for the helpers above)

package testcli

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestSort(t *testing.T) {
	input := []string{"a", "bc", "cd"}
	stableSortReverseLength(input)
	assert.Equal(t, []string{"bc", "cd", "a"}, input)
}

func TestMatchesPrefixes(t *testing.T) {
	assert.False(t, matchesPrefixes([]string{}, ""))
	assert.False(t, matchesPrefixes([]string{"/hello", "/hello/world"}, ""))
	assert.True(t, matchesPrefixes([]string{"/hello", "/a/b"}, "/hello"))
	assert.True(t, matchesPrefixes([]string{"/hello", "/a/b"}, "/a/b/c"))
}

@@ -0,0 +1,14 @@
(new file, package testutil: unified-diff helper)

package testutil

import (
	"fmt"

	"github.com/hexops/gotextdiff"
	"github.com/hexops/gotextdiff/myers"
	"github.com/hexops/gotextdiff/span"
)

func Diff(filename1, filename2, s1, s2 string) string {
	edits := myers.ComputeEdits(span.URIFromPath(filename1), s1, s2)
	return fmt.Sprint(gotextdiff.ToUnified(filename1, filename2, s1, edits))
}

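A brief usage sketch for the Diff helper, assuming it is called from code inside this repository (internal packages are not importable from other modules); the strings and file labels are invented:

package main

import (
	"fmt"

	"github.com/databricks/cli/internal/testutil"
)

func main() {
	want := "Name: project\nTarget: dev\n"
	got := "Name: project\nTarget: prod\n"

	// The file names only label the unified-diff header; no files are read.
	fmt.Print(testutil.Diff("expected.txt", "actual.txt", want, got))
}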