Add acceptance tests for builtin templates (#2135)

## Changes

To accommodate this:
* Add the server URL to the set of output replacements (a sketch of the idea follows this list)
* Handle the call to the permissions API in the dummy server
* Run the main script in a subshell so that working directory changes do not leak into cleanup
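For the first item: the dummy server listens on a randomly chosen port, so its address has to be rewritten to a stable placeholder before test output can be compared against the checked-in files. Below is a minimal sketch of that kind of substitution; `normalizeServerURL` is an illustrative helper, not the harness's actual replacement API. The golden files below keep the `http://` scheme, so only the host:port portion is replaced.

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// normalizeServerURL rewrites the dynamic test server address into the
// $DATABRICKS_HOST placeholder so output stays stable across test runs,
// regardless of which port the test server picked.
func normalizeServerURL(output, serverURL string) string {
	u, err := url.Parse(serverURL)
	if err != nil {
		return output
	}
	return strings.ReplaceAll(output, u.Host, "$DATABRICKS_HOST")
}

func main() {
	out := "Host: http://127.0.0.1:53214\nValidation OK!"
	fmt.Println(normalizeServerURL(out, "http://127.0.0.1:53214"))
	// Host: http://$DATABRICKS_HOST
	// Validation OK!
}
```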

Pieter Noordhuis, 2025-01-14 19:23:34 +01:00 (committed by GitHub)
commit 82e35530b0 (parent 72e677d0ac)
14 changed files with 161 additions and 6 deletions


@@ -66,7 +66,7 @@ func TestAccept(t *testing.T) {
 	server := StartServer(t)
 	AddHandlers(server)
 	// Redirect API access to local server:
-	t.Setenv("DATABRICKS_HOST", fmt.Sprintf("http://127.0.0.1:%d", server.Port))
+	t.Setenv("DATABRICKS_HOST", server.URL)
 	t.Setenv("DATABRICKS_TOKEN", "dapi1234")

 	homeDir := t.TempDir()
@@ -217,6 +217,11 @@ func doComparison(t *testing.T, pathExpected, pathNew, valueNew string) {

 // Note, cleanups are not executed if main script fails; that's not a huge issue, since it runs it temp dir.
 func readMergedScriptContents(t *testing.T, dir string) string {
 	scriptContents := testutil.ReadFile(t, filepath.Join(dir, EntryPointScript))
+
+	// Wrap script contents in a subshell such that changing the working
+	// directory only affects the main script and not cleanup.
+	scriptContents = "(\n" + scriptContents + ")\n"
+
 	prepares := []string{}
 	cleanups := []string{}
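As a standalone illustration of the subshell wrapping added above (not part of the harness): a `cd` inside `( ... )` affects only the wrapped commands, so anything appended afterwards, such as the cleanup section, still runs from the original working directory. A minimal sketch that shells out to bash:

```go
package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// The cd runs inside a subshell, so the final pwd (standing in for the
	// appended cleanup commands) still reports the original directory.
	script := "(\ncd /tmp\npwd\n)\npwd\n"
	out, err := exec.Command("bash", "-ec", script).CombinedOutput()
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	// First line: /tmp (or its resolved path); second line: the directory
	// the program was started from.
	fmt.Print(string(out))
}
```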


@@ -0,0 +1,6 @@
{
  "project_name": "my_dbt_sql",
  "http_path": "/sql/2.0/warehouses/f00dcafe",
  "default_catalog": "main",
  "personal_schemas": "yes, use a schema based on the current user name during development"
}


@@ -0,0 +1,32 @@
>>> $CLI bundle init dbt-sql --config-file ./input.json
Welcome to the dbt template for Databricks Asset Bundles!

A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
workspace_host: http://$DATABRICKS_HOST

📊 Your new project has been created in the 'my_dbt_sql' directory!
If you already have dbt installed, just type 'cd my_dbt_sql; dbt init' to get started.
Refer to the README.md file for full "getting started" guide and production setup instructions.

>>> $CLI bundle validate -t dev
Name: my_dbt_sql
Target: dev
Workspace:
  Host: http://$DATABRICKS_HOST
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/dev

Validation OK!

>>> $CLI bundle validate -t prod
Name: my_dbt_sql
Target: prod
Workspace:
  Host: http://$DATABRICKS_HOST
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/prod

Validation OK!


@@ -0,0 +1,5 @@
trace $CLI bundle init dbt-sql --config-file ./input.json
cd my_dbt_sql
trace $CLI bundle validate -t dev
trace $CLI bundle validate -t prod


@@ -0,0 +1 @@
rm -fr my_dbt_sql


@@ -0,0 +1,6 @@
{
  "project_name": "my_default_python",
  "include_notebook": "yes",
  "include_dlt": "yes",
  "include_python": "yes"
}


@@ -0,0 +1,30 @@
>>> $CLI bundle init default-python --config-file ./input.json
Welcome to the default Python template for Databricks Asset Bundles!
Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): http://$DATABRICKS_HOST

✨ Your new project has been created in the 'my_default_python' directory!

Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.

>>> $CLI bundle validate -t dev
Name: my_default_python
Target: dev
Workspace:
  Host: http://$DATABRICKS_HOST
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/dev

Validation OK!

>>> $CLI bundle validate -t prod
Name: my_default_python
Target: prod
Workspace:
  Host: http://$DATABRICKS_HOST
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/prod

Validation OK!


@@ -0,0 +1,5 @@
trace $CLI bundle init default-python --config-file ./input.json
cd my_default_python
trace $CLI bundle validate -t dev
trace $CLI bundle validate -t prod


@@ -0,0 +1 @@
rm -fr my_default_python


@@ -0,0 +1,6 @@
{
  "project_name": "my_default_sql",
  "http_path": "/sql/2.0/warehouses/f00dcafe",
  "default_catalog": "main",
  "personal_schemas": "yes, automatically use a schema based on the current user name during development"
}


@@ -0,0 +1,32 @@
>>> $CLI bundle init default-sql --config-file ./input.json
Welcome to the default SQL template for Databricks Asset Bundles!

A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
workspace_host: http://$DATABRICKS_HOST

✨ Your new project has been created in the 'my_default_sql' directory!

Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.

>>> $CLI bundle validate -t dev
Name: my_default_sql
Target: dev
Workspace:
  Host: http://$DATABRICKS_HOST
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/dev

Validation OK!

>>> $CLI bundle validate -t prod
Name: my_default_sql
Target: prod
Workspace:
  Host: http://$DATABRICKS_HOST
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/prod

Validation OK!


@@ -0,0 +1,5 @@
trace $CLI bundle init default-sql --config-file ./input.json
cd my_default_sql
trace $CLI bundle validate -t dev
trace $CLI bundle validate -t prod


@@ -0,0 +1 @@
rm -fr my_default_sql


@@ -2,11 +2,11 @@ package acceptance_test

 import (
 	"encoding/json"
-	"net"
 	"net/http"
 	"net/http/httptest"
 	"testing"

+	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/iam"
 	"github.com/databricks/databricks-sdk-go/service/workspace"
@@ -15,7 +15,6 @@ import (
 type TestServer struct {
 	*httptest.Server
 	Mux  *http.ServeMux
-	Port int
 }

 type HandlerFunc func(r *http.Request) (any, error)
@@ -23,12 +22,10 @@ type HandlerFunc func(r *http.Request) (any, error)
 func NewTestServer() *TestServer {
 	mux := http.NewServeMux()
 	server := httptest.NewServer(mux)
-	port := server.Listener.Addr().(*net.TCPAddr).Port

 	return &TestServer{
 		Server: server,
 		Mux:    mux,
-		Port:   port,
 	}
 }
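For context on the removed `Port` field and `net` import: `httptest.NewServer` binds to a free localhost port and already exposes the full base URL through its `URL` field, so callers no longer need to extract the port themselves. A minimal standalone sketch:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	// httptest.NewServer picks a free port and starts listening; URL holds
	// the complete base address (scheme, host, and port), which is what the
	// acceptance tests now put into DATABRICKS_HOST.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {}))
	defer srv.Close()

	fmt.Println(srv.URL) // e.g. http://127.0.0.1:54321
}
```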
@@ -126,4 +123,27 @@ func AddHandlers(server *TestServer) {
 			ResourceId: "1001",
 		}, nil
 	})
+
+	server.Handle("/api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
+		return catalog.MetastoreAssignment{
+			DefaultCatalogName: "main",
+		}, nil
+	})
+
+	server.Handle("/api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
+		return workspace.WorkspaceObjectPermissions{
+			ObjectId:   "1001",
+			ObjectType: "DIRECTORY",
+			AccessControlList: []workspace.WorkspaceObjectAccessControlResponse{
+				{
+					UserName: "tester@databricks.com",
+					AllPermissions: []workspace.WorkspaceObjectPermission{
+						{
+							PermissionLevel: "CAN_MANAGE",
+						},
+					},
+				},
+			},
+		}, nil
+	})
 }
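A hedged usage sketch of the new permissions stub; it assumes the harness JSON-encodes the values returned by these handlers, which the SDK-typed responses above suggest. With the handler registered, the dummy server answers the directory-permissions request that the template acceptance tests trigger:

```go
package acceptance_test

import (
	"encoding/json"
	"net/http"
	"testing"

	"github.com/databricks/databricks-sdk-go/service/workspace"
)

// Illustrative only: query the stubbed endpoint directly and decode the
// canned response defined in AddHandlers above.
func TestPermissionsStub(t *testing.T) {
	server := NewTestServer()
	AddHandlers(server)
	defer server.Close()

	resp, err := http.Get(server.URL + "/api/2.0/permissions/directories/1001")
	if err != nil {
		t.Fatal(err)
	}
	defer resp.Body.Close()

	var perms workspace.WorkspaceObjectPermissions
	if err := json.NewDecoder(resp.Body).Decode(&perms); err != nil {
		t.Fatal(err)
	}
	if perms.ObjectType != "DIRECTORY" {
		t.Fatalf("unexpected object type: %q", perms.ObjectType)
	}
}
```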