mirror of https://github.com/databricks/cli.git
Merge remote-tracking branch 'origin' into telemetry/logger-2
Commit 7c7f9d808d
@@ -33,13 +33,16 @@ jobs:
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: 1.23.2
go-version: 1.23.4

- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: '3.9'

- name: Install uv
uses: astral-sh/setup-uv@v4

- name: Set go env
run: |
echo "GOPATH=$(go env GOPATH)" >> $GITHUB_ENV

@@ -61,7 +64,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version: 1.23.2
go-version: 1.23.4
- name: Run go mod tidy
run: |
go mod tidy

@@ -85,7 +88,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: 1.23.2
go-version: 1.23.4

# Github repo: https://github.com/ajv-validator/ajv-cli
- name: Install ajv-cli
@@ -31,7 +31,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: 1.23.2
go-version: 1.23.4

# The default cache key for this action considers only the `go.sum` file.
# We include .goreleaser.yaml here to differentiate from the cache used by the push action
@@ -22,7 +22,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: 1.23.2
go-version: 1.23.4

# The default cache key for this action considers only the `go.sum` file.
# We include .goreleaser.yaml here to differentiate from the cache used by the push action
NOTICE
@@ -97,3 +97,11 @@ License - https://github.com/stretchr/testify/blob/master/LICENSE
whilp/git-urls - https://github.com/whilp/git-urls
Copyright (c) 2020 Will Maier
License - https://github.com/whilp/git-urls/blob/master/LICENSE

github.com/wI2L/jsondiff v0.6.1
Copyright (c) 2020-2024 William Poussier <william.poussier@gmail.com>
License - https://github.com/wI2L/jsondiff/blob/master/LICENSE

https://github.com/hexops/gotextdiff
Copyright (c) 2009 The Go Authors. All rights reserved.
License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
@@ -16,12 +16,6 @@ type infer struct {
func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
artifact := b.Config.Artifacts[m.name]

// TODO use python.DetectVEnvExecutable once bundle has a way to specify venv path
py, err := python.DetectExecutable(ctx)
if err != nil {
return diag.FromErr(err)
}

// Note: using --build-number (build tag) flag does not help with re-installing
// libraries on all-purpose clusters. The reason is that `pip` ignoring build tag
// when upgrading the library and only look at wheel version.

@@ -36,7 +30,9 @@ func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// version=datetime.datetime.utcnow().strftime("%Y%m%d.%H%M%S"),
// ...
//)
artifact.BuildCommand = fmt.Sprintf(`"%s" setup.py bdist_wheel`, py)

py := python.GetExecutable()
artifact.BuildCommand = fmt.Sprintf(`%s setup.py bdist_wheel`, py)

return nil
}
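For orientation: the change above drops the interpreter-path lookup in favor of a bare binary name when inferring a wheel build command. A minimal standalone sketch of the resulting behavior — it mirrors the new `python.GetExecutable` helper rather than importing the CLI packages, so the function name here is local to the example:

package main

import (
    "fmt"
    "runtime"
)

// getExecutable mirrors libs/python.GetExecutable from this commit: Windows
// virtualenvs ship python.exe but not python3.exe, so prefer "python" there.
func getExecutable() string {
    if runtime.GOOS == "windows" {
        return "python"
    }
    return "python3"
}

func main() {
    // The inferred wheel build command, as constructed in the infer mutator above.
    fmt.Printf("%s setup.py bdist_wheel\n", getExecutable())
}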
@@ -541,7 +541,7 @@ func TestLoadDiagnosticsFile_nonExistent(t *testing.T) {

func TestInterpreterPath(t *testing.T) {
if runtime.GOOS == "windows" {
assert.Equal(t, "venv\\Scripts\\python3.exe", interpreterPath("venv"))
assert.Equal(t, "venv\\Scripts\\python.exe", interpreterPath("venv"))
} else {
assert.Equal(t, "venv/bin/python3", interpreterPath("venv"))
}

@@ -673,7 +673,7 @@ func withFakeVEnv(t *testing.T, venvPath string) {

func interpreterPath(venvPath string) string {
if runtime.GOOS == "windows" {
return filepath.Join(venvPath, "Scripts", "python3.exe")
return filepath.Join(venvPath, "Scripts", "python.exe")
} else {
return filepath.Join(venvPath, "bin", "python3")
}
@@ -6,6 +6,7 @@ import (
"os"
"reflect"
"regexp"
"slices"
"strings"

yaml3 "gopkg.in/yaml.v3"

@@ -119,7 +120,15 @@ func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error
if err != nil {
return err
}
missingAnnotations, err := convert.FromTyped(&d.missingAnnotations, dyn.NilValue)

for k := range d.missingAnnotations {
if !isCliPath(k) {
delete(d.missingAnnotations, k)
fmt.Printf("Missing annotations for `%s` that are not in CLI package, try to fetch latest OpenAPI spec and regenerate annotations", k)
}
}

missingAnnotations, err := convert.FromTyped(d.missingAnnotations, dyn.NilValue)
if err != nil {
return err
}

@@ -129,7 +138,13 @@ func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error
return err
}

err = saveYamlWithStyle(outputPath, output)
var outputTyped annotationFile
err = convert.ToTyped(&outputTyped, output)
if err != nil {
return err
}

err = saveYamlWithStyle(outputPath, outputTyped)
if err != nil {
return err
}

@@ -153,21 +168,50 @@ func assignAnnotation(s *jsonschema.Schema, a annotation) {
s.Enum = a.Enum
}

func saveYamlWithStyle(outputPath string, input dyn.Value) error {
func saveYamlWithStyle(outputPath string, annotations annotationFile) error {
annotationOrder := yamlsaver.NewOrder([]string{"description", "markdown_description", "title", "default", "enum"})
style := map[string]yaml3.Style{}
file, _ := input.AsMap()
for _, v := range file.Keys() {
style[v.MustString()] = yaml3.LiteralStyle

order := getAlphabeticalOrder(annotations)
dynMap := map[string]dyn.Value{}
for k, v := range annotations {
style[k] = yaml3.LiteralStyle

properties := map[string]dyn.Value{}
propertiesOrder := getAlphabeticalOrder(v)
for key, value := range v {
d, err := convert.FromTyped(value, dyn.NilValue)
if d.Kind() == dyn.KindNil || err != nil {
properties[key] = dyn.NewValue(map[string]dyn.Value{}, []dyn.Location{{Line: propertiesOrder.Get(key)}})
continue
}
val, err := yamlsaver.ConvertToMapValue(value, annotationOrder, []string{}, map[string]dyn.Value{})
if err != nil {
return err
}
properties[key] = val.WithLocations([]dyn.Location{{Line: propertiesOrder.Get(key)}})
}

dynMap[k] = dyn.NewValue(properties, []dyn.Location{{Line: order.Get(k)}})
}

saver := yamlsaver.NewSaverWithStyle(style)
err := saver.SaveAsYAML(file, outputPath, true)
err := saver.SaveAsYAML(dynMap, outputPath, true)
if err != nil {
return err
}
return nil
}

func getAlphabeticalOrder[T any](mapping map[string]T) *yamlsaver.Order {
order := []string{}
for k := range mapping {
order = append(order, k)
}
slices.Sort(order)
return yamlsaver.NewOrder(order)
}

func convertLinksToAbsoluteUrl(s string) string {
if s == "" {
return s

@@ -207,3 +251,7 @@ func convertLinksToAbsoluteUrl(s string) string {

return result
}

func isCliPath(path string) bool {
return !strings.HasPrefix(path, "github.com/databricks/databricks-sdk-go")
}
@@ -417,10 +417,10 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable:
"lookup":
"description": |-
The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
"type":
"markdown_description":
"description": |-
The type of the variable.
"markdown_description":
"type":
"description": |-
The type of the variable.
github.com/databricks/cli/bundle/config/variable.Variable:

@@ -438,64 +438,3 @@ github.com/databricks/cli/bundle/config/variable.Variable:
"type":
"description": |-
The type of the variable.
github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
"ai21labs_api_key":
"description": |-
PLACEHOLDER
"ai21labs_api_key_plaintext":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig:
"private_key":
"description": |-
PLACEHOLDER
"private_key_plaintext":
"description": |-
PLACEHOLDER
"project_id":
"description": |-
PLACEHOLDER
"region":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig:
"microsoft_entra_client_id":
"description": |-
PLACEHOLDER
"microsoft_entra_client_secret":
"description": |-
PLACEHOLDER
"microsoft_entra_client_secret_plaintext":
"description": |-
PLACEHOLDER
"microsoft_entra_tenant_id":
"description": |-
PLACEHOLDER
"openai_api_base":
"description": |-
PLACEHOLDER
"openai_api_key":
"description": |-
PLACEHOLDER
"openai_api_key_plaintext":
"description": |-
PLACEHOLDER
"openai_api_type":
"description": |-
PLACEHOLDER
"openai_api_version":
"description": |-
PLACEHOLDER
"openai_deployment_name":
"description": |-
PLACEHOLDER
"openai_organization":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig:
"palm_api_key":
"description": |-
PLACEHOLDER
"palm_api_key_plaintext":
"description": |-
PLACEHOLDER
File diff suppressed because it is too large
@@ -42,7 +42,8 @@ func copyFile(src, dst string) error {

// Checks whether descriptions are added for new config fields in the annotations.yml file
// If this test fails either manually add descriptions to the `annotations.yml` or do the following:
// 1. run `make schema` from the repository root to add placeholder descriptions
// 1. for fields described outside of CLI package fetch latest schema from the OpenAPI spec and add path to file to DATABRICKS_OPENAPI_SPEC env variable
// 2. run `make schema` from the repository root to add placeholder descriptions
// 2. replace all "PLACEHOLDER" values with the actual descriptions if possible
// 3. run `make schema` again to regenerate the schema with acutal descriptions
func TestRequiredAnnotationsForNewFields(t *testing.T) {
@@ -1,7 +1,6 @@
package main

import (
"bytes"
"encoding/json"
"fmt"
"os"

@@ -9,7 +8,6 @@ import (
"reflect"
"strings"

"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/jsonschema"
"gopkg.in/yaml.v3"
)

@@ -83,7 +81,11 @@ func (p *openapiParser) findRef(typ reflect.Type) (jsonschema.Schema, bool) {
// Skip if the type is not in the openapi spec.
_, ok := p.ref[k]
if !ok {
continue
k = mapIncorrectTypNames(k)
_, ok = p.ref[k]
if !ok {
continue
}
}

// Return the first Go SDK type found in the openapi spec.

@@ -93,6 +95,23 @@ func (p *openapiParser) findRef(typ reflect.Type) (jsonschema.Schema, bool) {
return jsonschema.Schema{}, false
}

// Fix inconsistent type names between the Go SDK and the OpenAPI spec.
// E.g. "serving.PaLmConfig" in the Go SDK is "serving.PaLMConfig" in the OpenAPI spec.
func mapIncorrectTypNames(ref string) string {
switch ref {
case "serving.PaLmConfig":
return "serving.PaLMConfig"
case "serving.OpenAiConfig":
return "serving.OpenAIConfig"
case "serving.GoogleCloudVertexAiConfig":
return "serving.GoogleCloudVertexAIConfig"
case "serving.Ai21LabsConfig":
return "serving.AI21LabsConfig"
default:
return ref
}
}

// Use the OpenAPI spec to load descriptions for the given type.
func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overridesPath string) error {
annotations := annotationFile{}

@@ -142,31 +161,40 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overridesPath string) error {
return err
}

b, err = yaml.Marshal(overrides)
err = saveYamlWithStyle(overridesPath, overrides)
if err != nil {
return err
}
o, err := yamlloader.LoadYAML("", bytes.NewBuffer(b))
err = saveYamlWithStyle(outputPath, annotations)
if err != nil {
return err
}
err = saveYamlWithStyle(overridesPath, o)
err = prependCommentToFile(outputPath, "# This file is auto-generated. DO NOT EDIT.\n")
if err != nil {
return err
}
b, err = yaml.Marshal(annotations)
if err != nil {
return err
}
b = bytes.Join([][]byte{[]byte("# This file is auto-generated. DO NOT EDIT."), b}, []byte("\n"))
err = os.WriteFile(outputPath, b, 0o644)
if err != nil {
return err
}

return nil
}

func prependCommentToFile(outputPath, comment string) error {
b, err := os.ReadFile(outputPath)
if err != nil {
return err
}
f, err := os.OpenFile(outputPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o644)
if err != nil {
return err
}
defer f.Close()

_, err = f.WriteString(comment)
if err != nil {
return err
}
_, err = f.Write(b)
return err
}

func addEmptyOverride(key, pkg string, overridesFile annotationFile) {
if overridesFile[pkg] == nil {
overridesFile[pkg] = map[string]annotation{}
@@ -2,7 +2,7 @@ module github.com/databricks/cli/bundle/internal/tf/codegen

go 1.23

toolchain go1.23.2
toolchain go1.23.4

require (
github.com/hashicorp/go-version v1.7.0
@ -4772,9 +4772,11 @@
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"ai21labs_api_key": {
|
||||
"description": "The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"ai21labs_api_key_plaintext": {
|
||||
"description": "An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
|
||||
"$ref": "#/$defs/string"
|
||||
}
|
||||
},
|
||||
|
@ -5287,15 +5289,19 @@
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"private_key": {
|
||||
"description": "The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"private_key_plaintext": {
|
||||
"description": "The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`.",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"project_id": {
|
||||
"description": "This is the Google Cloud project id that the service account is associated with.",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"region": {
|
||||
"description": "This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions.",
|
||||
"$ref": "#/$defs/string"
|
||||
}
|
||||
},
|
||||
|
@ -5313,36 +5319,47 @@
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"microsoft_entra_client_id": {
|
||||
"description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"microsoft_entra_client_secret": {
|
||||
"description": "The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.\nIf you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"microsoft_entra_client_secret_plaintext": {
|
||||
"description": "The client secret used for Microsoft Entra ID authentication provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"microsoft_entra_tenant_id": {
|
||||
"description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"openai_api_base": {
|
||||
"description": "This is a field to provide a customized base URl for the OpenAI API.\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\nFor other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"openai_api_key": {
|
||||
"description": "The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"openai_api_key_plaintext": {
|
||||
"description": "The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"openai_api_type": {
|
||||
"description": "This is an optional field to specify the type of OpenAI API to use.\nFor Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security\naccess validation protocol. For access token validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"openai_api_version": {
|
||||
"description": "This is an optional field to specify the OpenAI API version.\nFor Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to\nutilize, specified by a date.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"openai_deployment_name": {
|
||||
"description": "This field is only required for Azure OpenAI and is the name of the deployment resource for the\nAzure OpenAI service.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"openai_organization": {
|
||||
"description": "This is an optional field to specify the organization in OpenAI or Azure OpenAI.\n",
|
||||
"$ref": "#/$defs/string"
|
||||
}
|
||||
},
|
||||
|
@ -5360,9 +5377,11 @@
|
|||
"type": "object",
|
||||
"properties": {
|
||||
"palm_api_key": {
|
||||
"description": "The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.",
|
||||
"$ref": "#/$defs/string"
|
||||
},
|
||||
"palm_api_key_plaintext": {
|
||||
"description": "The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.",
|
||||
"$ref": "#/$defs/string"
|
||||
}
|
||||
},
|
||||
|
|
go.mod
@@ -2,7 +2,7 @@ module github.com/databricks/cli

go 1.23

toolchain go1.23.2
toolchain go1.23.4

require (
github.com/Masterminds/semver/v3 v3.3.1 // MIT

@@ -14,6 +14,7 @@ require (
github.com/hashicorp/hc-install v0.9.0 // MPL 2.0
github.com/hashicorp/terraform-exec v0.21.0 // MPL 2.0
github.com/hashicorp/terraform-json v0.23.0 // MPL 2.0
github.com/hexops/gotextdiff v1.0.3 // BSD 3-Clause "New" or "Revised" License
github.com/manifoldco/promptui v0.9.0 // BSD-3-Clause
github.com/mattn/go-isatty v0.0.20 // MIT
github.com/nwidger/jsoncolor v0.3.2 // MIT

@@ -22,6 +23,7 @@ require (
github.com/spf13/cobra v1.8.1 // Apache 2.0
github.com/spf13/pflag v1.0.5 // BSD-3-Clause
github.com/stretchr/testify v1.10.0 // MIT
github.com/wI2L/jsondiff v0.6.1 // MIT
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
golang.org/x/mod v0.22.0
golang.org/x/oauth2 v0.24.0

@@ -55,6 +57,10 @@ require (
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/tidwall/gjson v1.18.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
github.com/zclconf/go-cty v1.15.0 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
@ -109,6 +109,8 @@ github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVW
|
|||
github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg=
|
||||
github.com/hashicorp/terraform-json v0.23.0 h1:sniCkExU4iKtTADReHzACkk8fnpQXrdD2xoR+lppBkI=
|
||||
github.com/hashicorp/terraform-json v0.23.0/go.mod h1:MHdXbBAbSg0GvzuWazEGKAn/cyNfIB7mN6y7KJN6y2c=
|
||||
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
|
||||
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
|
||||
|
@ -156,6 +158,18 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO
|
|||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
|
||||
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
|
||||
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
|
||||
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
|
||||
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
|
||||
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
|
||||
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
|
||||
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
|
||||
github.com/wI2L/jsondiff v0.6.1 h1:ISZb9oNWbP64LHnu4AUhsMF5W0FIj5Ok3Krip9Shqpw=
|
||||
github.com/wI2L/jsondiff v0.6.1/go.mod h1:KAEIojdQq66oJiHhDyQez2x+sRit0vIzC9KeK0yizxM=
|
||||
github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM=
|
||||
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
|
||||
github.com/zclconf/go-cty v1.15.0 h1:tTCRWxsexYUmtt/wVxgDClUe+uQusuI443uL6e+5sXQ=
|
||||
|
|
|
@@ -0,0 +1,132 @@
package bundle_test

import (
"encoding/json"
"os"
"os/exec"
"path/filepath"
"testing"

"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/python/pythontest"
"github.com/stretchr/testify/require"
)

var pythonVersions = []string{
"3.8",
"3.9",
"3.10",
"3.11",
"3.12",
"3.13",
}

var pythonVersionsShort = []string{
"3.9",
"3.12",
}

var extraInstalls = map[string][]string{
"3.12": {"setuptools"},
"3.13": {"setuptools"},
}

func TestDefaultPython(t *testing.T) {
versions := pythonVersions
if testing.Short() {
versions = pythonVersionsShort
}

for _, pythonVersion := range versions {
t.Run(pythonVersion, func(t *testing.T) {
testDefaultPython(t, pythonVersion)
})
}
}

func testDefaultPython(t *testing.T, pythonVersion string) {
ctx, wt := acc.WorkspaceTest(t)

uniqueProjectId := testutil.RandomName("")
ctx, replacements := testcli.WithReplacementsMap(ctx)
replacements.Set(uniqueProjectId, "$UNIQUE_PRJ")

user, err := wt.W.CurrentUser.Me(ctx)
require.NoError(t, err)
require.NotNil(t, user)
testcli.PrepareReplacementsUser(t, replacements, *user)
testcli.PrepareReplacements(t, replacements, wt.W)

tmpDir := t.TempDir()
testutil.Chdir(t, tmpDir)

opts := pythontest.VenvOpts{
PythonVersion: pythonVersion,
Dir: tmpDir,
}

pythontest.RequireActivatedPythonEnv(t, ctx, &opts)
extras, ok := extraInstalls[pythonVersion]
if ok {
args := append([]string{"pip", "install", "--python", opts.PythonExe}, extras...)
cmd := exec.Command("uv", args...)
require.NoError(t, cmd.Run())
}

projectName := "project_name_" + uniqueProjectId

initConfig := map[string]string{
"project_name": projectName,
"include_notebook": "yes",
"include_python": "yes",
"include_dlt": "yes",
}
b, err := json.Marshal(initConfig)
require.NoError(t, err)
err = os.WriteFile(filepath.Join(tmpDir, "config.json"), b, 0o644)
require.NoError(t, err)

testcli.AssertOutput(
t,
ctx,
[]string{"bundle", "init", "default-python", "--config-file", "config.json"},
testutil.TestData("testdata/default_python/bundle_init.txt"),
)
testutil.Chdir(t, projectName)

t.Cleanup(func() {
// Delete the stack
testcli.RequireSuccessfulRun(t, ctx, "bundle", "destroy", "--auto-approve")
})

testcli.AssertOutput(
t,
ctx,
[]string{"bundle", "validate"},
testutil.TestData("testdata/default_python/bundle_validate.txt"),
)
testcli.AssertOutput(
t,
ctx,
[]string{"bundle", "deploy"},
testutil.TestData("testdata/default_python/bundle_deploy.txt"),
)

testcli.AssertOutputJQ(
t,
ctx,
[]string{"bundle", "summary", "--output", "json"},
testutil.TestData("testdata/default_python/bundle_summary.txt"),
[]string{
"/bundle/terraform/exec_path",
"/resources/jobs/project_name_$UNIQUE_PRJ_job/email_notifications",
"/resources/jobs/project_name_$UNIQUE_PRJ_job/job_clusters/0/new_cluster/node_type_id",
"/resources/jobs/project_name_$UNIQUE_PRJ_job/url",
"/resources/pipelines/project_name_$UNIQUE_PRJ_pipeline/catalog",
"/resources/pipelines/project_name_$UNIQUE_PRJ_pipeline/url",
"/workspace/current_user",
},
)
}
@@ -0,0 +1,6 @@
Building project_name_$UNIQUE_PRJ...
Uploading project_name_$UNIQUE_PRJ-0.0.1+<NUMID>.<NUMID>-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files...
Deploying resources...
Updating deployment state...
Deployment complete!
@@ -0,0 +1,8 @@

Welcome to the default Python template for Databricks Asset Bundles!
Workspace to use (auto-detected, edit in 'project_name_$UNIQUE_PRJ/databricks.yml'): https://$DATABRICKS_HOST

✨ Your new project has been created in the 'project_name_$UNIQUE_PRJ' directory!

Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
@ -0,0 +1,185 @@
|
|||
{
|
||||
"bundle": {
|
||||
"name": "project_name_$UNIQUE_PRJ",
|
||||
"target": "dev",
|
||||
"environment": "dev",
|
||||
"terraform": {
|
||||
"exec_path": "/tmp/.../terraform"
|
||||
},
|
||||
"git": {
|
||||
"bundle_root_path": ".",
|
||||
"inferred": true
|
||||
},
|
||||
"mode": "development",
|
||||
"deployment": {
|
||||
"lock": {
|
||||
"enabled": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"include": [
|
||||
"resources/project_name_$UNIQUE_PRJ.job.yml",
|
||||
"resources/project_name_$UNIQUE_PRJ.pipeline.yml"
|
||||
],
|
||||
"workspace": {
|
||||
"host": "https://$DATABRICKS_HOST",
|
||||
"current_user": {
|
||||
"active": true,
|
||||
"displayName": "$USERNAME",
|
||||
"emails": [
|
||||
{
|
||||
"primary": true,
|
||||
"type": "work",
|
||||
"value": "$USERNAME"
|
||||
}
|
||||
],
|
||||
"groups": [
|
||||
{
|
||||
"$ref": "Groups/$USER.Groups[0]",
|
||||
"display": "team.engineering",
|
||||
"type": "direct",
|
||||
"value": "$USER.Groups[0]"
|
||||
}
|
||||
],
|
||||
"id": "$USER.Id",
|
||||
"name": {
|
||||
"familyName": "$USERNAME",
|
||||
"givenName": "$USERNAME"
|
||||
},
|
||||
"schemas": [
|
||||
"urn:ietf:params:scim:schemas:core:2.0:User",
|
||||
"urn:ietf:params:scim:schemas:extension:workspace:2.0:User"
|
||||
],
|
||||
"short_name": "$USERNAME",
|
||||
"userName": "$USERNAME"
|
||||
},
|
||||
"root_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev",
|
||||
"file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files",
|
||||
"resource_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/resources",
|
||||
"artifact_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/artifacts",
|
||||
"state_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state"
|
||||
},
|
||||
"resources": {
|
||||
"jobs": {
|
||||
"project_name_$UNIQUE_PRJ_job": {
|
||||
"deployment": {
|
||||
"kind": "BUNDLE",
|
||||
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
|
||||
},
|
||||
"edit_mode": "UI_LOCKED",
|
||||
"email_notifications": {
|
||||
"on_failure": [
|
||||
"$USERNAME"
|
||||
]
|
||||
},
|
||||
"format": "MULTI_TASK",
|
||||
"id": "<NUMID>",
|
||||
"job_clusters": [
|
||||
{
|
||||
"job_cluster_key": "job_cluster",
|
||||
"new_cluster": {
|
||||
"autoscale": {
|
||||
"max_workers": 4,
|
||||
"min_workers": 1
|
||||
},
|
||||
"node_type_id": "i3.xlarge",
|
||||
"spark_version": "15.4.x-scala2.12"
|
||||
}
|
||||
}
|
||||
],
|
||||
"max_concurrent_runs": 4,
|
||||
"name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_job",
|
||||
"queue": {
|
||||
"enabled": true
|
||||
},
|
||||
"tags": {
|
||||
"dev": "$USERNAME"
|
||||
},
|
||||
"tasks": [
|
||||
{
|
||||
"job_cluster_key": "job_cluster",
|
||||
"notebook_task": {
|
||||
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/notebook"
|
||||
},
|
||||
"task_key": "notebook_task"
|
||||
},
|
||||
{
|
||||
"depends_on": [
|
||||
{
|
||||
"task_key": "notebook_task"
|
||||
}
|
||||
],
|
||||
"pipeline_task": {
|
||||
"pipeline_id": "${resources.pipelines.project_name_$UNIQUE_PRJ_pipeline.id}"
|
||||
},
|
||||
"task_key": "refresh_pipeline"
|
||||
},
|
||||
{
|
||||
"depends_on": [
|
||||
{
|
||||
"task_key": "refresh_pipeline"
|
||||
}
|
||||
],
|
||||
"job_cluster_key": "job_cluster",
|
||||
"libraries": [
|
||||
{
|
||||
"whl": "dist/*.whl"
|
||||
}
|
||||
],
|
||||
"python_wheel_task": {
|
||||
"entry_point": "main",
|
||||
"package_name": "project_name_$UNIQUE_PRJ"
|
||||
},
|
||||
"task_key": "main_task"
|
||||
}
|
||||
],
|
||||
"trigger": {
|
||||
"pause_status": "PAUSED",
|
||||
"periodic": {
|
||||
"interval": 1,
|
||||
"unit": "DAYS"
|
||||
}
|
||||
},
|
||||
"url": "https://$DATABRICKS_HOST/jobs/<NUMID>?o=<NUMID>"
|
||||
}
|
||||
},
|
||||
"pipelines": {
|
||||
"project_name_$UNIQUE_PRJ_pipeline": {
|
||||
"catalog": "main",
|
||||
"configuration": {
|
||||
"bundle.sourcePath": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src"
|
||||
},
|
||||
"deployment": {
|
||||
"kind": "BUNDLE",
|
||||
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
|
||||
},
|
||||
"development": true,
|
||||
"id": "<UUID>",
|
||||
"libraries": [
|
||||
{
|
||||
"notebook": {
|
||||
"path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/dlt_pipeline"
|
||||
}
|
||||
}
|
||||
],
|
||||
"name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_pipeline",
|
||||
"target": "project_name_$UNIQUE_PRJ_dev",
|
||||
"url": "https://$DATABRICKS_HOST/pipelines/<UUID>?o=<NUMID>"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sync": {
|
||||
"paths": [
|
||||
"."
|
||||
]
|
||||
},
|
||||
"presets": {
|
||||
"name_prefix": "[dev $USERNAME] ",
|
||||
"pipelines_development": true,
|
||||
"trigger_pause_status": "PAUSED",
|
||||
"jobs_max_concurrent_runs": 4,
|
||||
"tags": {
|
||||
"dev": "$USERNAME"
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,8 @@
Name: project_name_$UNIQUE_PRJ
Target: dev
Workspace:
Host: https://$DATABRICKS_HOST
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev

Validation OK!
@ -0,0 +1,224 @@
|
|||
package testcli
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/databricks/cli/internal/testutil"
|
||||
"github.com/databricks/cli/libs/iamutil"
|
||||
"github.com/databricks/cli/libs/testdiff"
|
||||
"github.com/databricks/databricks-sdk-go"
|
||||
"github.com/databricks/databricks-sdk-go/service/iam"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
var OverwriteMode = os.Getenv("TESTS_OUTPUT") == "OVERWRITE"
|
||||
|
||||
func ReadFile(t testutil.TestingT, ctx context.Context, filename string) string {
|
||||
data, err := os.ReadFile(filename)
|
||||
if os.IsNotExist(err) {
|
||||
return ""
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
// On CI, on Windows \n in the file somehow end up as \r\n
|
||||
return NormalizeNewlines(string(data))
|
||||
}
|
||||
|
||||
func captureOutput(t testutil.TestingT, ctx context.Context, args []string) string {
|
||||
t.Logf("run args: [%s]", strings.Join(args, ", "))
|
||||
r := NewRunner(t, ctx, args...)
|
||||
stdout, stderr, err := r.Run()
|
||||
assert.NoError(t, err)
|
||||
out := stderr.String() + stdout.String()
|
||||
return ReplaceOutput(t, ctx, out)
|
||||
}
|
||||
|
||||
func WriteFile(t testutil.TestingT, filename, data string) {
|
||||
t.Logf("Overwriting %s", filename)
|
||||
err := os.WriteFile(filename, []byte(data), 0o644)
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
func AssertOutput(t testutil.TestingT, ctx context.Context, args []string, expectedPath string) {
|
||||
expected := ReadFile(t, ctx, expectedPath)
|
||||
|
||||
out := captureOutput(t, ctx, args)
|
||||
|
||||
if out != expected {
|
||||
actual := fmt.Sprintf("Output from %v", args)
|
||||
testdiff.AssertEqualTexts(t, expectedPath, actual, expected, out)
|
||||
|
||||
if OverwriteMode {
|
||||
WriteFile(t, expectedPath, out)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func AssertOutputJQ(t testutil.TestingT, ctx context.Context, args []string, expectedPath string, ignorePaths []string) {
|
||||
expected := ReadFile(t, ctx, expectedPath)
|
||||
|
||||
out := captureOutput(t, ctx, args)
|
||||
|
||||
if out != expected {
|
||||
actual := fmt.Sprintf("Output from %v", args)
|
||||
testdiff.AssertEqualJQ(t.(*testing.T), expectedPath, actual, expected, out, ignorePaths)
|
||||
|
||||
if OverwriteMode {
|
||||
WriteFile(t, expectedPath, out)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
uuidRegex = regexp.MustCompile(`[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}`)
|
||||
numIdRegex = regexp.MustCompile(`[0-9]{3,}`)
|
||||
privatePathRegex = regexp.MustCompile(`(/tmp|/private)(/.*)/([a-zA-Z0-9]+)`)
|
||||
)
|
||||
|
||||
func ReplaceOutput(t testutil.TestingT, ctx context.Context, out string) string {
|
||||
out = NormalizeNewlines(out)
|
||||
replacements := GetReplacementsMap(ctx)
|
||||
if replacements == nil {
|
||||
t.Fatal("WithReplacementsMap was not called")
|
||||
}
|
||||
out = replacements.Replace(out)
|
||||
out = uuidRegex.ReplaceAllString(out, "<UUID>")
|
||||
out = numIdRegex.ReplaceAllString(out, "<NUMID>")
|
||||
out = privatePathRegex.ReplaceAllString(out, "/tmp/.../$3")
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
type key int
|
||||
|
||||
const (
|
||||
replacementsMapKey = key(1)
|
||||
)
|
||||
|
||||
type Replacement struct {
|
||||
Old string
|
||||
New string
|
||||
}
|
||||
|
||||
type ReplacementsContext struct {
|
||||
Repls []Replacement
|
||||
}
|
||||
|
||||
func (r *ReplacementsContext) Replace(s string) string {
|
||||
// QQQ Should probably only replace whole words
|
||||
for _, repl := range r.Repls {
|
||||
s = strings.ReplaceAll(s, repl.Old, repl.New)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (r *ReplacementsContext) Set(old, new string) {
|
||||
if old == "" || new == "" {
|
||||
return
|
||||
}
|
||||
r.Repls = append(r.Repls, Replacement{Old: old, New: new})
|
||||
}
|
||||
|
||||
func WithReplacementsMap(ctx context.Context) (context.Context, *ReplacementsContext) {
|
||||
value := ctx.Value(replacementsMapKey)
|
||||
if value != nil {
|
||||
if existingMap, ok := value.(*ReplacementsContext); ok {
|
||||
return ctx, existingMap
|
||||
}
|
||||
}
|
||||
|
||||
newMap := &ReplacementsContext{}
|
||||
ctx = context.WithValue(ctx, replacementsMapKey, newMap)
|
||||
return ctx, newMap
|
||||
}
|
||||
|
||||
func GetReplacementsMap(ctx context.Context) *ReplacementsContext {
|
||||
value := ctx.Value(replacementsMapKey)
|
||||
if value != nil {
|
||||
if existingMap, ok := value.(*ReplacementsContext); ok {
|
||||
return existingMap
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func PrepareReplacements(t testutil.TestingT, r *ReplacementsContext, w *databricks.WorkspaceClient) {
|
||||
// in some clouds (gcp) w.Config.Host includes "https://" prefix in others it's really just a host (azure)
|
||||
host := strings.TrimPrefix(strings.TrimPrefix(w.Config.Host, "http://"), "https://")
|
||||
r.Set(host, "$DATABRICKS_HOST")
|
||||
r.Set(w.Config.ClusterID, "$DATABRICKS_CLUSTER_ID")
|
||||
r.Set(w.Config.WarehouseID, "$DATABRICKS_WAREHOUSE_ID")
|
||||
r.Set(w.Config.ServerlessComputeID, "$DATABRICKS_SERVERLESS_COMPUTE_ID")
|
||||
r.Set(w.Config.MetadataServiceURL, "$DATABRICKS_METADATA_SERVICE_URL")
|
||||
r.Set(w.Config.AccountID, "$DATABRICKS_ACCOUNT_ID")
|
||||
r.Set(w.Config.Token, "$DATABRICKS_TOKEN")
|
||||
r.Set(w.Config.Username, "$DATABRICKS_USERNAME")
|
||||
r.Set(w.Config.Password, "$DATABRICKS_PASSWORD")
|
||||
r.Set(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
|
||||
r.Set(w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE")
|
||||
r.Set(w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT")
|
||||
r.Set(w.Config.GoogleCredentials, "$GOOGLE_CREDENTIALS")
|
||||
r.Set(w.Config.AzureResourceID, "$DATABRICKS_AZURE_RESOURCE_ID")
|
||||
r.Set(w.Config.AzureClientSecret, "$ARM_CLIENT_SECRET")
|
||||
// r.Set(w.Config.AzureClientID, "$ARM_CLIENT_ID")
|
||||
r.Set(w.Config.AzureClientID, "$USERNAME")
|
||||
r.Set(w.Config.AzureTenantID, "$ARM_TENANT_ID")
|
||||
r.Set(w.Config.ActionsIDTokenRequestURL, "$ACTIONS_ID_TOKEN_REQUEST_URL")
|
||||
r.Set(w.Config.ActionsIDTokenRequestToken, "$ACTIONS_ID_TOKEN_REQUEST_TOKEN")
|
||||
r.Set(w.Config.AzureEnvironment, "$ARM_ENVIRONMENT")
|
||||
r.Set(w.Config.ClientID, "$DATABRICKS_CLIENT_ID")
|
||||
r.Set(w.Config.ClientSecret, "$DATABRICKS_CLIENT_SECRET")
|
||||
r.Set(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
|
||||
// This is set to words like "path" that happen too frequently
|
||||
// r.Set(w.Config.AuthType, "$DATABRICKS_AUTH_TYPE")
|
||||
}
|
||||
|
||||
func PrepareReplacementsUser(t testutil.TestingT, r *ReplacementsContext, u iam.User) {
|
||||
// There could be exact matches or overlap between different name fields, so sort them by length
|
||||
// to ensure we match the largest one first and map them all to the same token
|
||||
names := []string{
|
||||
u.DisplayName,
|
||||
u.UserName,
|
||||
iamutil.GetShortUserName(&u),
|
||||
u.Name.FamilyName,
|
||||
u.Name.GivenName,
|
||||
}
|
||||
if u.Name != nil {
|
||||
names = append(names, u.Name.FamilyName)
|
||||
names = append(names, u.Name.GivenName)
|
||||
}
|
||||
for _, val := range u.Emails {
|
||||
names = append(names, val.Value)
|
||||
}
|
||||
stableSortReverseLength(names)
|
||||
|
||||
for _, name := range names {
|
||||
r.Set(name, "$USERNAME")
|
||||
}
|
||||
|
||||
for ind, val := range u.Groups {
|
||||
r.Set(val.Value, fmt.Sprintf("$USER.Groups[%d]", ind))
|
||||
}
|
||||
|
||||
r.Set(u.Id, "$USER.Id")
|
||||
|
||||
for ind, val := range u.Roles {
|
||||
r.Set(val.Value, fmt.Sprintf("$USER.Roles[%d]", ind))
|
||||
}
|
||||
}
|
||||
|
||||
func stableSortReverseLength(strs []string) {
|
||||
slices.SortStableFunc(strs, func(a, b string) int {
|
||||
return len(b) - len(a)
|
||||
})
|
||||
}
|
||||
|
||||
func NormalizeNewlines(input string) string {
|
||||
output := strings.ReplaceAll(input, "\r\n", "\n")
|
||||
return strings.ReplaceAll(output, "\r", "\n")
|
||||
}
|
|
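For orientation: the new internal/testcli helpers above implement golden-file assertions whose output is first normalized through a replacements map. A hedged usage sketch follows; the host value and fixture path are hypothetical, and setting TESTS_OUTPUT=OVERWRITE rewrites the expected file on mismatch, as in the helper itself:

package example_test

import (
    "context"
    "testing"

    "github.com/databricks/cli/internal/testcli"
    "github.com/databricks/cli/internal/testutil"
)

func TestBundleValidateOutput(t *testing.T) {
    // Register replacements so environment-specific values are masked in the output.
    ctx, replacements := testcli.WithReplacementsMap(context.Background())
    replacements.Set("my-host.cloud.databricks.com", "$DATABRICKS_HOST") // hypothetical host

    // Run the CLI and compare against a recorded golden file (hypothetical path).
    testcli.AssertOutput(
        t,
        ctx,
        []string{"bundle", "validate"},
        testutil.TestData("testdata/bundle_validate.txt"),
    )
}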
@@ -0,0 +1,13 @@
package testcli

import (
"testing"

"github.com/stretchr/testify/assert"
)

func TestSort(t *testing.T) {
input := []string{"a", "bc", "cd"}
stableSortReverseLength(input)
assert.Equal(t, []string{"bc", "cd", "a"}, input)
}
@@ -47,6 +47,9 @@ func Chdir(t TestingT, dir string) string {

wd, err := os.Getwd()
require.NoError(t, err)
if os.Getenv("TESTS_ORIG_WD") == "" {
t.Setenv("TESTS_ORIG_WD", wd)
}

abs, err := filepath.Abs(dir)
require.NoError(t, err)

@@ -61,3 +64,10 @@ func Chdir(t TestingT, dir string) string {

return wd
}

// Return filename ff testutil.Chdir was not called.
// Return absolute path to filename testutil.Chdir() was called.
func TestData(filename string) string {
// Note, if TESTS_ORIG_WD is not set, Getenv return "" and Join returns filename
return filepath.Join(os.Getenv("TESTS_ORIG_WD"), filename)
}
@@ -11,6 +11,19 @@ import (
"runtime"
)

// GetExecutable gets appropriate python binary name for the platform
func GetExecutable() string {
// On Windows when virtualenv is created, the <env>/Scripts directory
// contains python.exe but no python3.exe.
// Most installers (e.g. the ones from python.org) only install python.exe and not python3.exe

if runtime.GOOS == "windows" {
return "python"
} else {
return "python3"
}
}

// DetectExecutable looks up the path to the python3 executable from the PATH
// environment variable.
//

@@ -25,7 +38,9 @@ func DetectExecutable(ctx context.Context) (string, error) {
// the parent directory tree.
//
// See https://github.com/pyenv/pyenv#understanding-python-version-selection
out, err := exec.LookPath("python3")

out, err := exec.LookPath(GetExecutable())

// most of the OS'es have python3 in $PATH, but for those which don't,
// we perform the latest version lookup
if err != nil && !errors.Is(err, exec.ErrNotFound) {

@@ -54,7 +69,7 @@ func DetectVEnvExecutable(venvPath string) (string, error) {
func DetectVEnvExecutable(venvPath string) (string, error) {
interpreterPath := filepath.Join(venvPath, "bin", "python3")
if runtime.GOOS == "windows" {
interpreterPath = filepath.Join(venvPath, "Scripts", "python3.exe")
interpreterPath = filepath.Join(venvPath, "Scripts", "python.exe")
}

if _, err := os.Stat(interpreterPath); err != nil {
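A short usage sketch of the two lookups touched above, assuming the `libs/python` package import path from this repository; the `.venv` directory name is only a placeholder:

package main

import (
    "context"
    "fmt"

    "github.com/databricks/cli/libs/python"
)

func main() {
    // Resolve an interpreter from PATH: "python" on Windows, "python3" elsewhere.
    exe, err := python.DetectExecutable(context.Background())
    if err == nil {
        fmt.Println("interpreter on PATH:", exe)
    }

    // Resolve the interpreter inside a virtual environment:
    // <venv>/Scripts/python.exe on Windows, <venv>/bin/python3 elsewhere.
    venvExe, err := python.DetectVEnvExecutable(".venv")
    if err == nil {
        fmt.Println("venv interpreter:", venvExe)
    }
}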
@@ -39,7 +39,7 @@ func TestDetectVEnvExecutable_badLayout(t *testing.T) {

func interpreterPath(venvPath string) string {
if runtime.GOOS == "windows" {
return filepath.Join(venvPath, "Scripts", "python3.exe")
return filepath.Join(venvPath, "Scripts", "python.exe")
} else {
return filepath.Join(venvPath, "bin", "python3")
}
@@ -0,0 +1,107 @@
package pythontest

import (
"context"
"errors"
"fmt"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"testing"

"github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/require"
)

type VenvOpts struct {
// input
PythonVersion string
skipVersionCheck bool

// input/output
Dir string
Name string

// output:
// Absolute path to venv
EnvPath string

// Absolute path to venv/bin or venv/Scripts, depending on OS
BinPath string

// Absolute path to python binary
PythonExe string
}

func CreatePythonEnv(opts *VenvOpts) error {
if opts == nil || opts.PythonVersion == "" {
return errors.New("PythonVersion must be provided")
}
if opts.Name == "" {
opts.Name = testutil.RandomName("test-venv-")
}

cmd := exec.Command("uv", "venv", opts.Name, "--python", opts.PythonVersion, "--seed", "-q")
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
cmd.Dir = opts.Dir
err := cmd.Run()
if err != nil {
return err
}

opts.EnvPath, err = filepath.Abs(filepath.Join(opts.Dir, opts.Name))
if err != nil {
return err
}

_, err = os.Stat(opts.EnvPath)
if err != nil {
return fmt.Errorf("cannot stat EnvPath %s: %s", opts.EnvPath, err)
}

if runtime.GOOS == "windows" {
// https://github.com/pypa/virtualenv/commit/993ba1316a83b760370f5a3872b3f5ef4dd904c1
opts.BinPath = filepath.Join(opts.EnvPath, "Scripts")
opts.PythonExe = filepath.Join(opts.BinPath, "python.exe")
} else {
opts.BinPath = filepath.Join(opts.EnvPath, "bin")
opts.PythonExe = filepath.Join(opts.BinPath, "python3")
}

_, err = os.Stat(opts.BinPath)
if err != nil {
return fmt.Errorf("cannot stat BinPath %s: %s", opts.BinPath, err)
}

_, err = os.Stat(opts.PythonExe)
if err != nil {
return fmt.Errorf("cannot stat PythonExe %s: %s", opts.PythonExe, err)
}

if !opts.skipVersionCheck {
cmd := exec.Command(opts.PythonExe, "--version")
out, err := cmd.CombinedOutput()
if err != nil {
return fmt.Errorf("Failed to run %s --version: %s", opts.PythonExe, err)
}
outString := string(out)
expectVersion := "Python " + opts.PythonVersion
if !strings.HasPrefix(outString, expectVersion) {
return fmt.Errorf("Unexpected output from %s --version: %v (expected %v)", opts.PythonExe, outString, expectVersion)
}
}

return nil
}

func RequireActivatedPythonEnv(t *testing.T, ctx context.Context, opts *VenvOpts) {
err := CreatePythonEnv(opts)
require.NoError(t, err)
require.DirExists(t, opts.BinPath)

newPath := fmt.Sprintf("%s%c%s", opts.BinPath, os.PathListSeparator, os.Getenv("PATH"))
t.Setenv("PATH", newPath)
}
@@ -0,0 +1,43 @@
package pythontest

import (
"context"
"os/exec"
"path/filepath"
"testing"

"github.com/databricks/cli/libs/python"
"github.com/stretchr/testify/require"
)

func TestVenvSuccess(t *testing.T) {
// Test at least two version to ensure we capture a case where venv version does not match system one
for _, pythonVersion := range []string{"3.11", "3.12"} {
t.Run(pythonVersion, func(t *testing.T) {
ctx := context.Background()
dir := t.TempDir()
opts := VenvOpts{
PythonVersion: pythonVersion,
Dir: dir,
}
RequireActivatedPythonEnv(t, ctx, &opts)
require.DirExists(t, opts.EnvPath)
require.DirExists(t, opts.BinPath)
require.FileExists(t, opts.PythonExe)

pythonExe, err := exec.LookPath(python.GetExecutable())
require.NoError(t, err)
require.Equal(t, filepath.Dir(pythonExe), filepath.Dir(opts.PythonExe))
require.FileExists(t, pythonExe)
})
}
}

func TestWrongVersion(t *testing.T) {
require.Error(t, CreatePythonEnv(&VenvOpts{PythonVersion: "4.0"}))
}

func TestMissingVersion(t *testing.T) {
require.Error(t, CreatePythonEnv(nil))
require.Error(t, CreatePythonEnv(&VenvOpts{}))
}
@@ -0,0 +1,90 @@
package testdiff

import (
"fmt"
"strings"

"github.com/databricks/cli/internal/testutil"
"github.com/hexops/gotextdiff"
"github.com/hexops/gotextdiff/myers"
"github.com/hexops/gotextdiff/span"
"github.com/stretchr/testify/assert"
"github.com/wI2L/jsondiff"
)

func UnifiedDiff(filename1, filename2, s1, s2 string) string {
edits := myers.ComputeEdits(span.URIFromPath(filename1), s1, s2)
return fmt.Sprint(gotextdiff.ToUnified(filename1, filename2, s1, edits))
}

func AssertEqualTexts(t testutil.TestingT, filename1, filename2, expected, out string) {
if len(out) < 1000 && len(expected) < 1000 {
// This shows full strings + diff which could be useful when debugging newlines
assert.Equal(t, expected, out)
} else {
// only show diff for large texts
diff := UnifiedDiff(filename1, filename2, expected, out)
t.Errorf("Diff:\n" + diff)
}
}

func AssertEqualJQ(t testutil.TestingT, expectedName, outName, expected, out string, ignorePaths []string) {
patch, err := jsondiff.CompareJSON([]byte(expected), []byte(out))
if err != nil {
t.Logf("CompareJSON error for %s vs %s: %s (fallback to textual comparison)", outName, expectedName, err)
AssertEqualTexts(t, expectedName, outName, expected, out)
} else {
diff := UnifiedDiff(expectedName, outName, expected, out)
t.Logf("Diff:\n%s", diff)
allowedDiffs := []string{}
erroredDiffs := []string{}
for _, op := range patch {
if allowDifference(ignorePaths, op) {
allowedDiffs = append(allowedDiffs, fmt.Sprintf("%7s %s %v old=%v", op.Type, op.Path, op.Value, op.OldValue))
} else {
erroredDiffs = append(erroredDiffs, fmt.Sprintf("%7s %s %v old=%v", op.Type, op.Path, op.Value, op.OldValue))
}
}
if len(allowedDiffs) > 0 {
t.Logf("Allowed differences between %s and %s:\n ==> %s", expectedName, outName, strings.Join(allowedDiffs, "\n ==> "))
}
if len(erroredDiffs) > 0 {
t.Errorf("Unexpected differences between %s and %s:\n ==> %s", expectedName, outName, strings.Join(erroredDiffs, "\n ==> "))
}
}
}

func allowDifference(ignorePaths []string, op jsondiff.Operation) bool {
if matchesPrefixes(ignorePaths, op.Path) {
return true
}
if op.Type == "replace" && almostSameStrings(op.OldValue, op.Value) {
return true
}
return false
}

// compare strings and ignore forward vs backward slashes
func almostSameStrings(v1, v2 any) bool {
s1, ok := v1.(string)
if !ok {
return false
}
s2, ok := v2.(string)
if !ok {
return false
}
return strings.ReplaceAll(s1, "\\", "/") == strings.ReplaceAll(s2, "\\", "/")
}

func matchesPrefixes(prefixes []string, path string) bool {
for _, p := range prefixes {
if p == path {
return true
}
if strings.HasPrefix(path, p+"/") {
return true
}
}
return false
}
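A hedged usage sketch of the JSON comparison helper added above; the import path matches the one used by internal/testcli earlier in this commit, while the JSON payloads and the ignored path are made up for illustration:

package example_test

import (
    "testing"

    "github.com/databricks/cli/libs/testdiff"
)

func TestCompareSummaries(t *testing.T) {
    expected := `{"bundle": {"name": "demo"}, "workspace": {"host": "https://expected-host"}}`
    actual := `{"bundle": {"name": "demo"}, "workspace": {"host": "https://actual-host"}}`

    // Differences under ignored prefixes are only logged; any other
    // difference between the two JSON documents fails the test.
    testdiff.AssertEqualJQ(t, "expected.json", "actual.json", expected, actual,
        []string{"/workspace/host"})
}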
@@ -0,0 +1,20 @@
package testdiff

import (
"testing"

"github.com/stretchr/testify/assert"
)

func TestDiff(t *testing.T) {
assert.Equal(t, "", UnifiedDiff("a", "b", "", ""))
assert.Equal(t, "", UnifiedDiff("a", "b", "abc", "abc"))
assert.Equal(t, "--- a\n+++ b\n@@ -1 +1,2 @@\n abc\n+123\n", UnifiedDiff("a", "b", "abc\n", "abc\n123\n"))
}

func TestMatchesPrefixes(t *testing.T) {
assert.False(t, matchesPrefixes([]string{}, ""))
assert.False(t, matchesPrefixes([]string{"/hello", "/hello/world"}, ""))
assert.True(t, matchesPrefixes([]string{"/hello", "/a/b"}, "/hello"))
assert.True(t, matchesPrefixes([]string{"/hello", "/a/b"}, "/a/b/c"))
}