Shreyas Goenka 2025-02-05 13:13:14 +01:00
commit 2253eb0590
27 changed files with 185 additions and 87 deletions

.github/CODEOWNERS

@ -1 +1,2 @@
* @pietern @andrewnester @shreyas-goenka @denik
cmd/labs @alexott @nfx

@ -159,7 +159,6 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
testdiff.PrepareReplacementsDevVersion(t, &repls)
testdiff.PrepareReplacementSdkVersion(t, &repls)
testdiff.PrepareReplacementsGoVersion(t, &repls)
testdiff.PrepareReplaceOS(t, &repls)
testDirs := getTests(t)
require.NotEmpty(t, testDirs)
@ -258,7 +257,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
server = testserver.New(t)
server.HandleUnknown()
server.RecordRequests = config.RecordRequests
server.IncludeReqHeaders = config.IncludeReqHeaders
server.IncludeRequestHeaders = config.IncludeRequestHeaders
// If no custom server stubs are defined, add the default handlers.
if len(config.Server) == 0 {
@ -267,8 +266,12 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
for _, stub := range config.Server {
require.NotEmpty(t, stub.Pattern)
server.Handle(stub.Pattern, func(req *http.Request) (resp any, err error) {
return stub.Response.Body, nil
server.Handle(stub.Pattern, func(req *http.Request) (any, int) {
statusCode := http.StatusOK
if stub.Response.StatusCode != 0 {
statusCode = stub.Response.StatusCode
}
return stub.Response.Body, statusCode
})
}
cmd.Env = append(cmd.Env, "DATABRICKS_HOST="+server.URL)

@ -0,0 +1,5 @@
[DEFAULT]
host = $DATABRICKS_HOST
[profile_name]
host = https://test@non-existing-subdomain.databricks.com

@ -0,0 +1,14 @@
bundle:
name: test-auth
workspace:
host: $DATABRICKS_HOST
targets:
dev:
default: true
workspace:
host: $DATABRICKS_HOST
prod:
workspace:
host: https://bar.com

@ -0,0 +1,32 @@
=== Inside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"
=== Inside the bundle, target flags
>>> errcode [CLI] current-user me -t dev
"[USERNAME]"
=== Inside the bundle, target and matching profile
>>> errcode [CLI] current-user me -t dev -p DEFAULT
"[USERNAME]"
=== Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail
>>> errcode [CLI] current-user me -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_URL]
Exit code: 1
=== Inside the bundle, target and not matching profile
>>> errcode [CLI] current-user me -t dev -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_URL]
Exit code: 1
=== Outside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"
=== Outside the bundle, profile flag
>>> errcode [CLI] current-user me -p profile_name
"[USERNAME]"

@ -0,0 +1,30 @@
# Replace placeholder with an actual host URL
envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
envsubst < .databrickscfg > out && mv out .databrickscfg
export DATABRICKS_CONFIG_FILE=.databrickscfg
host=$DATABRICKS_HOST
unset DATABRICKS_HOST
title "Inside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName
title "Inside the bundle, target flags"
trace errcode $CLI current-user me -t dev | jq .userName
title "Inside the bundle, target and matching profile"
trace errcode $CLI current-user me -t dev -p DEFAULT | jq .userName
title "Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail"
trace errcode $CLI current-user me -p profile_name | jq .userName
title "Inside the bundle, target and not matching profile"
trace errcode $CLI current-user me -t dev -p profile_name
cd ..
export DATABRICKS_HOST=$host
title "Outside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName
title "Outside the bundle, profile flag"
trace errcode $CLI current-user me -p profile_name | jq .userName

@ -0,0 +1,8 @@
Badness = "When the -p flag is used inside the bundle folder for any CLI command, the CLI uses the bundle host anyway instead of the profile host"
# On some clouds the DATABRICKS_HOST variable is set without the https:// prefix.
# As a result, the host in the output is replaced with the DATABRICKS_URL placeholder instead of DATABRICKS_HOST.
# As a workaround, normalize the output by replacing DATABRICKS_HOST with DATABRICKS_URL.
[[Repls]]
Old='DATABRICKS_HOST'
New='DATABRICKS_URL'

@ -1,7 +1,7 @@
Error: Files in the 'include' configuration section must be YAML files.
Error: Files in the 'include' configuration section must be YAML or JSON files.
in databricks.yml:5:4
The file test.py in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
The file test.py in the 'include' configuration section is not a YAML or JSON file, and only such files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
Name: non_yaml_in_includes

@ -15,7 +15,7 @@ import (
func StartCmdServer(t *testing.T) *testserver.Server {
server := testserver.New(t)
server.Handle("/", func(r *http.Request) (any, error) {
server.Handle("/", func(r *http.Request) (any, int) {
q := r.URL.Query()
args := strings.Split(q.Get("args"), " ")
@ -40,7 +40,7 @@ func StartCmdServer(t *testing.T) *testserver.Server {
exitcode = 1
}
result["exitcode"] = exitcode
return result, nil
return result, http.StatusOK
})
return server
}

@ -47,8 +47,9 @@ type TestConfig struct {
// Record the requests made to the server and write them as output to
// out.requests.txt
RecordRequests bool
// Include the following request headers in the recorded requests
IncludeReqHeaders []string
// List of request headers to include when recording requests.
IncludeRequestHeaders []string
}
type ServerStub struct {
@ -59,7 +60,8 @@ type ServerStub struct {
// The response body to return.
Response struct {
Body string
Body string
StatusCode int
}
}
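
For orientation: the new TestConfig and ServerStub fields above map directly onto keys in a test's test.toml. A minimal sketch of how they might be combined, assuming a made-up endpoint and response body (neither is taken from this commit):

RecordRequests = true
IncludeRequestHeaders = ["Authorization", "User-Agent"]

[[Server]]
# Hypothetical endpoint and body, for illustration only.
Pattern = "GET /api/2.0/example"
Response.Body = '{"message": "not found"}'
Response.StatusCode = 404  # defaults to 200 (http.StatusOK) when omitted

Requests recorded this way are written to out.requests.txt, as the new jobs create tests later in this diff demonstrate.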

@ -12,7 +12,7 @@ import (
)
func AddHandlers(server *testserver.Server) {
server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, int) {
return compute.ListPoliciesResponse{
Policies: []compute.Policy{
{
@ -24,10 +24,10 @@ func AddHandlers(server *testserver.Server) {
Name: "some-test-cluster-policy",
},
},
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, int) {
return compute.ListInstancePools{
InstancePools: []compute.InstancePoolAndStats{
{
@ -35,10 +35,10 @@ func AddHandlers(server *testserver.Server) {
InstancePoolId: "1234",
},
},
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, int) {
return compute.ListClustersResponse{
Clusters: []compute.ClusterDetails{
{
@ -50,32 +50,32 @@ func AddHandlers(server *testserver.Server) {
ClusterId: "9876",
},
},
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, int) {
return iam.User{
Id: "1000012345",
UserName: "tester@databricks.com",
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, int) {
return workspace.ObjectInfo{
ObjectId: 1001,
ObjectType: "DIRECTORY",
Path: "",
ResourceId: "1001",
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, int) {
return catalog.MetastoreAssignment{
DefaultCatalogName: "main",
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, int) {
return workspace.WorkspaceObjectPermissions{
ObjectId: "1001",
ObjectType: "DIRECTORY",
@ -89,11 +89,11 @@ func AddHandlers(server *testserver.Server) {
},
},
},
}, nil
}, http.StatusOK
})
server.Handle("POST /api/2.0/workspace/mkdirs", func(r *http.Request) (any, error) {
return "{}", nil
server.Handle("POST /api/2.0/workspace/mkdirs", func(r *http.Request) (any, int) {
return "{}", http.StatusOK
})
server.Handle("GET /oidc/.well-known/oauth-authorization-server", func(r *http.Request) (any, error) {

@ -0,0 +1 @@
{"method":"POST","path":"/api/2.1/jobs/create","body":{"name":"abc"}}

@ -0,0 +1,5 @@
>>> [CLI] jobs create --json {"name":"abc"}
Error: Invalid access token.
Exit code: 1

@ -0,0 +1 @@
trace $CLI jobs create --json '{"name":"abc"}'

@ -0,0 +1,12 @@
LocalOnly = true # request recording currently does not work with cloud environment
RecordRequests = true
[[Server]]
Pattern = "POST /api/2.1/jobs/create"
Response.Body = '''
{
"error_code": "PERMISSION_DENIED",
"message": "Invalid access token."
}
'''
Response.StatusCode = 403

@ -1 +1 @@
{"method":"POST","path":"/api/2.1/jobs/create","body":{"name":"abc"}}
{"headers":{"Authorization":"Bearer dapi1234","User-Agent":"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/jobs_create cmd-exec-id/[UUID] auth/pat"},"method":"POST","path":"/api/2.1/jobs/create","body":{"name":"abc"}}

@ -1,5 +1,6 @@
LocalOnly = true # request recording currently does not work with cloud environment
RecordRequests = true
IncludeRequestHeaders = ["Authorization", "User-Agent"]
[[Server]]
Pattern = "POST /api/2.1/jobs/create"
@ -8,3 +9,19 @@ Response.Body = '''
"job_id": 1111
}
'''
[[Repls]]
Old = "(linux|darwin|windows)"
New = "[OS]"
[[Repls]]
Old = " upstream/[A-Za-z0-9.-]+"
New = ""
[[Repls]]
Old = " upstream-version/[A-Za-z0-9.-]+"
New = ""
[[Repls]]
Old = " cicd/[A-Za-z0-9.-]+"
New = ""

@ -71,11 +71,11 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
continue
}
seen[rel] = true
if filepath.Ext(rel) != ".yaml" && filepath.Ext(rel) != ".yml" {
if filepath.Ext(rel) != ".yaml" && filepath.Ext(rel) != ".yml" && filepath.Ext(rel) != ".json" {
diags = diags.Append(diag.Diagnostic{
Severity: diag.Error,
Summary: "Files in the 'include' configuration section must be YAML files.",
Detail: fmt.Sprintf("The file %s in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.", rel),
Summary: "Files in the 'include' configuration section must be YAML or JSON files.",
Detail: fmt.Sprintf("The file %s in the 'include' configuration section is not a YAML or JSON file, and only such files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.", rel),
Locations: b.Config.GetLocations(fmt.Sprintf("include[%d]", i)),
})
continue

@ -101,9 +101,9 @@ var envCopy = []string{
// same auxiliary programs (e.g. `az`, or `gcloud`) as the CLI.
"PATH",
// Include $AZURE_CONFIG_FILE in set of environment variables to pass along.
// Include $AZURE_CONFIG_DIR in set of environment variables to pass along.
// This is set in Azure DevOps by the AzureCLI@2 task.
"AZURE_CONFIG_FILE",
"AZURE_CONFIG_DIR",
// Include $TF_CLI_CONFIG_FILE to override terraform provider in development.
// See: https://developer.hashicorp.com/terraform/cli/config/config-file#explicit-installation-method-configuration

@ -292,7 +292,7 @@ func TestInheritEnvVars(t *testing.T) {
t.Setenv("HOME", "/home/testuser")
t.Setenv("PATH", "/foo:/bar")
t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc")
t.Setenv("AZURE_CONFIG_FILE", "/tmp/foo/bar")
t.Setenv("AZURE_CONFIG_DIR", "/tmp/foo/bar")
ctx := context.Background()
env := map[string]string{}
@ -301,7 +301,7 @@ func TestInheritEnvVars(t *testing.T) {
assert.Equal(t, "/home/testuser", env["HOME"])
assert.Equal(t, "/foo:/bar", env["PATH"])
assert.Equal(t, "/tmp/config.tfrc", env["TF_CLI_CONFIG_FILE"])
assert.Equal(t, "/tmp/foo/bar", env["AZURE_CONFIG_FILE"])
assert.Equal(t, "/tmp/foo/bar", env["AZURE_CONFIG_DIR"])
}
}

@ -1,8 +1,9 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Configuration reference for databricks.yml
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
# Configuration reference
This article provides reference for keys supported by <DABS> configuration (YAML). See [_](/dev-tools/bundles/index.md).

@ -1,8 +1,9 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Learn about resources supported by Databricks Asset Bundles and how to configure them.
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
# <DABS> resources
<DABS> allows you to specify information about the <Databricks> resources used by the bundle in the `resources` mapping in the bundle configuration. See [resources mapping](/dev-tools/bundles/settings.md#resources) and [resources key reference](/dev-tools/bundles/reference.md#resources).

@ -1,8 +1,9 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Configuration reference for databricks.yml
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
# Configuration reference
This article provides reference for keys supported by <DABS> configuration (YAML). See [_](/dev-tools/bundles/index.md).

@ -1,8 +1,9 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Learn about resources supported by Databricks Asset Bundles and how to configure them.
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
# <DABS> resources
<DABS> allows you to specify information about the <Databricks> resources used by the bundle in the `resources` mapping in the bundle configuration. See [resources mapping](/dev-tools/bundles/settings.md#resources) and [resources key reference](/dev-tools/bundles/reference.md#resources).

@ -1 +0,0 @@
* @nfx

@ -226,8 +226,3 @@ func PrepareReplacementsGoVersion(t testutil.TestingT, r *ReplacementsContext) {
t.Helper()
r.Set(goVersion(), "[GO_VERSION]")
}
func PrepareReplaceOS(t testutil.TestingT, r *ReplacementsContext) {
t.Helper()
r.Set(runtime.GOOS, "[OS]")
}

@ -20,17 +20,17 @@ type Server struct {
t testutil.TestingT
RecordRequests bool
IncludeReqHeaders []string
RecordRequests bool
IncludeRequestHeaders []string
Requests []Request
}
type Request struct {
Headers map[string]string `json:"headers,omitempty"`
Method string `json:"method"`
Path string `json:"path"`
Headers map[string]string `json:"headers,omitempty"`
Body any `json:"body,omitempty"`
Body any `json:"body"`
}
func New(t testutil.TestingT) *Server {
@ -45,32 +45,11 @@ func New(t testutil.TestingT) *Server {
}
}
func (s *Server) HandleUnknown() {
s.Handle("/", func(req *http.Request) (any, error) {
msg := fmt.Sprintf(`
unknown API request received. Please add a handler for this request in
your test. You can copy the following snippet in your test.toml file:
[[Server]]
Pattern = %s %s
Response = '''
<response here>
'''`, req.Method, req.URL.Path)
s.t.Fatalf(msg)
return nil, errors.New("unknown API request")
})
}
type HandlerFunc func(req *http.Request) (resp any, err error)
type HandlerFunc func(req *http.Request) (resp any, statusCode int)
func (s *Server) Handle(pattern string, handler HandlerFunc) {
s.Mux.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) {
resp, err := handler(r)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
resp, statusCode := handler(r)
if s.RecordRequests {
body, err := io.ReadAll(r.Body)
@ -78,36 +57,26 @@ func (s *Server) Handle(pattern string, handler HandlerFunc) {
headers := make(map[string]string)
for k, v := range r.Header {
if !slices.Contains(s.IncludeReqHeaders, k) {
continue
}
if len(v) == 0 {
if len(v) == 0 || !slices.Contains(s.IncludeRequestHeaders, k) {
continue
}
headers[k] = v[0]
}
var reqBody any
if len(body) > 0 && body[0] == '{' {
// serialize the body as is, if it's JSON
reqBody = json.RawMessage(body)
} else {
reqBody = string(body)
}
s.Requests = append(s.Requests, Request{
Headers: headers,
Method: r.Method,
Path: r.URL.Path,
Headers: headers,
Body: reqBody,
Body: json.RawMessage(body),
})
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(statusCode)
var respBytes []byte
var err error
respString, ok := resp.(string)
if ok {
respBytes = []byte(respString)