Compare commits

..

17 Commits

Author         SHA1        Message                                               Date
Andrew Nester  cb7bca22af  added missing err check                               2024-12-16 15:37:53 +01:00
Andrew Nester  1e74a0d257  Added support for bundle generate and bind for Apps  2024-12-16 15:37:53 +01:00
Andrew Nester  6b7f4de652  Added support for Databricks Apps in DABs            2024-12-16 15:37:51 +01:00
Andrew Nester  dde1b020fc  fixed tests after rebase                              2024-12-16 15:29:04 +01:00
Andrew Nester  438c19f2af  fixed lint                                            2024-12-16 15:27:23 +01:00
Andrew Nester  59c8254770  test fixes                                            2024-12-16 15:27:23 +01:00
Andrew Nester  cecc5695cd  fixes                                                 2024-12-16 15:27:22 +01:00
Andrew Nester  b6af5b44df  fixes after rebase                                    2024-12-16 15:27:22 +01:00
Andrew Nester  298d745fb5  use TF provider 1.61.0                                2024-12-16 15:27:22 +01:00
Andrew Nester  6915222ca5  interpolate after                                     2024-12-16 15:27:22 +01:00
Andrew Nester  fc00adb427  addressed feedback                                    2024-12-16 15:27:22 +01:00
Andrew Nester  c4e7b52832  fix fmt                                               2024-12-16 15:27:21 +01:00
Andrew Nester  d8a9b2f4df  addressed feedback                                    2024-12-16 15:27:21 +01:00
Andrew Nester  d8f210a2ad  chnaged to wait for deployment on start               2024-12-16 15:27:21 +01:00
Andrew Nester  79dd75a926  fix test                                              2024-12-16 15:27:21 +01:00
Andrew Nester  79323ee356  Added integration test                                2024-12-16 15:27:21 +01:00
Andrew Nester  98140b4087  Added support for Databricks Apps in DABs             2024-12-16 15:27:18 +01:00
68 changed files with 495 additions and 4457 deletions

View File

@@ -11,7 +11,7 @@
"required": ["go"],
"post_generate": [
"go test -timeout 240s -run TestConsistentDatabricksSdkVersion github.com/databricks/cli/internal/build",
"make schema",
"go run ./bundle/internal/schema/*.go ./bundle/schema/jsonschema.json",
"echo 'bundle/internal/tf/schema/\\*.go linguist-generated=true' >> ./.gitattributes",
"echo 'go.sum linguist-generated=true' >> ./.gitattributes",
"echo 'bundle/schema/jsonschema.json linguist-generated=true' >> ./.gitattributes"

View File

@@ -411,5 +411,5 @@ func new{{.PascalName}}() *cobra.Command {
{{- define "request-body-obj" -}}
{{- $method := .Method -}}
{{- $field := .Field -}}
{{$method.CamelName}}Req{{ if (and $method.RequestBodyField (and (not $field.IsPath) (not $field.IsQuery))) }}.{{$method.RequestBodyField.PascalName}}{{end}}.{{$field.PascalName}}
{{$method.CamelName}}Req{{ if (and $method.RequestBodyField (not $field.IsPath)) }}.{{$method.RequestBodyField.PascalName}}{{end}}.{{$field.PascalName}}
{{- end -}}

View File

@@ -57,6 +57,38 @@ jobs:
- name: Publish test coverage
uses: codecov/codecov-action@v4
fmt:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: 1.23.2
# No need to download cached dependencies when running gofmt.
cache: false
- name: Install goimports
run: |
go install golang.org/x/tools/cmd/goimports@latest
- name: Run make fmt
run: |
make fmt
- name: Run go mod tidy
run: |
go mod tidy
- name: Fail on differences
run: |
# Exit with status code 1 if there are differences (i.e. unformatted files)
git diff --exit-code
golangci:
name: lint
runs-on: ubuntu-latest
@@ -65,13 +97,6 @@ jobs:
- uses: actions/setup-go@v5
with:
go-version: 1.23.2
- name: Run go mod tidy
run: |
go mod tidy
- name: Fail on differences
run: |
# Exit with status code 1 if there are differences (i.e. unformatted files)
git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@v6
with:
@@ -99,19 +124,14 @@ jobs:
# By default the ajv-cli runs in strict mode which will fail if the schema
# itself is not valid. Strict mode is more strict than the JSON schema
# specification. See for details: https://ajv.js.org/options.html#strict-mode-options
# The ajv-cli is configured to use the markdownDescription keyword which is not part of the JSON schema specification,
# but is used in editors like VSCode to render markdown in the description field
- name: Validate bundle schema
run: |
go run main.go bundle schema > schema.json
# Add markdownDescription keyword to ajv
echo "module.exports=function(a){a.addKeyword('markdownDescription')}" >> keywords.js
for file in ./bundle/internal/schema/testdata/pass/*.yml; do
ajv test -s schema.json -d $file --valid -c=./keywords.js
ajv test -s schema.json -d $file --valid
done
for file in ./bundle/internal/schema/testdata/fail/*.yml; do
ajv test -s schema.json -d $file --invalid -c=./keywords.js
ajv test -s schema.json -d $file --invalid
done
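
Editor's note: a minimal Go sketch of the same pass/fail validation loop, for readers who want to reproduce it outside CI. It assumes the github.com/santhosh-tekuri/jsonschema/v5 and gopkg.in/yaml.v3 libraries, which this workflow does not use; a spec-compliant validator simply ignores unknown annotation keywords such as markdownDescription.

package main

import (
	"log"
	"os"
	"path/filepath"

	"github.com/santhosh-tekuri/jsonschema/v5"
	"gopkg.in/yaml.v3"
)

// check validates every YAML file matching glob against schema and fails
// if the outcome does not match wantValid.
func check(schema *jsonschema.Schema, glob string, wantValid bool) {
	files, err := filepath.Glob(glob)
	if err != nil {
		log.Fatal(err)
	}
	for _, f := range files {
		b, err := os.ReadFile(f)
		if err != nil {
			log.Fatal(err)
		}
		var doc any
		if err := yaml.Unmarshal(b, &doc); err != nil {
			log.Fatal(err)
		}
		// yaml.v3 decodes mappings to map[string]any, which Validate accepts.
		err = schema.Validate(doc)
		if (err == nil) != wantValid {
			log.Fatalf("%s: valid=%t, want valid=%t (%v)", f, err == nil, wantValid, err)
		}
	}
}

func main() {
	schema, err := jsonschema.Compile("schema.json")
	if err != nil {
		log.Fatal(err)
	}
	check(schema, "./bundle/internal/schema/testdata/pass/*.yml", true)
	check(schema, "./bundle/internal/schema/testdata/fail/*.yml", false)
}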

View File

@@ -1,5 +1,11 @@
default: build
fmt:
@echo "✓ Formatting source code with goimports ..."
@goimports -w $(shell find . -type f -name '*.go' -not -path "./vendor/*")
@echo "✓ Formatting source code with gofmt ..."
@gofmt -w $(shell find . -type f -name '*.go' -not -path "./vendor/*")
lint: vendor
@echo "✓ Linting source code with https://golangci-lint.run/ (with --fix)..."
@golangci-lint run --fix ./...
@@ -29,17 +35,8 @@ snapshot:
vendor:
@echo "✓ Filling vendor folder with library code ..."
@go mod vendor
schema:
@echo "✓ Generating json-schema ..."
@go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
integration:
$(INTEGRATION)
gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
integration-short:
$(INTEGRATION) -short
.PHONY: lint lintcheck test testonly coverage build snapshot vendor schema integration integration-short
.PHONY: fmt lint lintcheck test testonly coverage build snapshot vendor integration

View File

@@ -19,27 +19,37 @@ func (i *interpolateVariables) Apply(ctx context.Context, b *bundle.Bundle) diag
dyn.Key("config"),
)
tfToConfigMap := map[string]string{
"databricks_pipeline": "pipelines",
"databricks_job": "jobs",
"databricks_mlflow_model": "models",
"databricks_mlflow_experiment": "experiments",
"databricks_model_serving": "model_serving_endpoints",
"databricks_registered_model": "registered_models",
"databricks_quality_monitor": "quality_monitors",
"databricks_schema": "schemas",
"databricks_volume": "volumes",
"databricks_cluster": "clusters",
"databricks_dashboard": "dashboards",
"databricks_app": "apps",
}
err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
return dyn.MapByPattern(root, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) {
key, ok := tfToConfigMap[path[0].Key()]
if ok {
path = dyn.NewPath(dyn.Key("resources"), dyn.Key(key)).Append(path[1:]...)
switch path[0] {
case dyn.Key("databricks_pipeline"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("pipelines")).Append(path[1:]...)
case dyn.Key("databricks_job"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs")).Append(path[1:]...)
case dyn.Key("databricks_mlflow_model"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("models")).Append(path[1:]...)
case dyn.Key("databricks_mlflow_experiment"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("experiments")).Append(path[1:]...)
case dyn.Key("databricks_model_serving"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("model_serving_endpoints")).Append(path[1:]...)
case dyn.Key("databricks_registered_model"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("registered_models")).Append(path[1:]...)
case dyn.Key("databricks_quality_monitor"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("quality_monitors")).Append(path[1:]...)
case dyn.Key("databricks_schema"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("schemas")).Append(path[1:]...)
case dyn.Key("databricks_volume"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("volumes")).Append(path[1:]...)
case dyn.Key("databricks_cluster"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("clusters")).Append(path[1:]...)
case dyn.Key("databricks_dashboard"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("dashboards")).Append(path[1:]...)
case dyn.Key("databricks_app"):
path = dyn.NewPath(dyn.Key("resources"), dyn.Key("apps")).Append(path[1:]...)
default:
// Trigger "key not found" for unknown resource types.
return dyn.GetByPath(root, path)
}
return dyn.GetByPath(root, path)
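
Editor's note: both sides of this hunk perform the same rewrite: an interpolation reference that uses a Terraform resource type prefix is redirected into the bundle's resources tree, e.g. ${databricks_job.my_job.id} resolves via resources.jobs.my_job.id. A minimal, string-level sketch of the idea (the real mutator walks dyn.Path values, not strings):

package main

import (
	"fmt"
	"strings"
)

// tfToConfig maps a few Terraform resource types to their bundle config
// counterparts; the full table appears in the hunk above.
var tfToConfig = map[string]string{
	"databricks_job":      "jobs",
	"databricks_pipeline": "pipelines",
	"databricks_app":      "apps",
}

func rewrite(ref string) string {
	parts := strings.SplitN(ref, ".", 2)
	if key, ok := tfToConfig[parts[0]]; ok && len(parts) == 2 {
		return "resources." + key + "." + parts[1]
	}
	return ref // unknown resource types fall through to "key not found" handling
}

func main() {
	fmt.Println(rewrite("databricks_job.my_job.id")) // resources.jobs.my_job.id
}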

View File

@@ -25,8 +25,6 @@ func (u *uploadConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
var diags diag.Diagnostics
errGroup, ctx := errgroup.WithContext(ctx)
diagsPerApp := make(map[string]diag.Diagnostic)
for key, app := range b.Config.Resources.Apps {
// If the app has a config, we need to deploy it first.
// It means we need to write app.yml file with the content of the config field
@@ -59,12 +57,12 @@ func (u *uploadConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
errGroup.Go(func() error {
err = f.Write(ctx, path.Join(appPath, "app.yml"), buf, filer.OverwriteIfExists)
if err != nil {
diagsPerApp[key] = diag.Diagnostic{
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: "Failed to save config",
Detail: fmt.Sprintf("Failed to write %s file: %s", path.Join(app.SourceCodePath, "app.yml"), err),
Locations: b.Config.GetLocations(fmt.Sprintf("resources.apps.%s", key)),
}
})
}
return nil
})
@@ -72,11 +70,7 @@ func (u *uploadConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
}
if err := errGroup.Wait(); err != nil {
return diags.Extend(diag.FromErr(err))
}
for _, diag := range diagsPerApp {
diags = append(diags, diag)
return diag.FromErr(err)
}
return diags
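
Editor's note: the two variants above differ in how errors from the concurrent app.yml writes are collected. Appending to a shared diags slice from inside errGroup.Go goroutines can race; giving each goroutine its own slot and merging after Wait() is one safe alternative. A self-contained sketch of that pattern, with illustrative names that are not part of the bundle code:

package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

// upload stands in for the per-app app.yml write.
func upload(name string) error {
	if name == "app2" {
		return fmt.Errorf("failed to write app.yml for %s", name)
	}
	return nil
}

func main() {
	apps := []string{"app1", "app2", "app3"}
	// One slot per goroutine: writing distinct slice elements concurrently
	// is safe, unlike appending to a shared slice or writing a shared map.
	results := make([]error, len(apps))
	g, _ := errgroup.WithContext(context.Background())
	for i, name := range apps {
		i, name := i, name // capture loop variables (pre-Go 1.22 semantics)
		g.Go(func() error {
			results[i] = upload(name)
			return nil
		})
	}
	if err := g.Wait(); err != nil {
		fmt.Println("fatal:", err)
		return
	}
	for i, err := range results {
		if err != nil {
			fmt.Printf("%s: %v\n", apps[i], err)
		}
	}
}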

View File

@@ -6,7 +6,7 @@ import (
"github.com/databricks/databricks-sdk-go/service/apps"
)
func ConvertAppToValue(app *apps.App, sourceCodePath string, appConfig map[string]any) (dyn.Value, error) {
func ConvertAppToValue(app *apps.App, sourceCodePath string, appConfig map[string]interface{}) (dyn.Value, error) {
ac, err := convert.FromTyped(appConfig, dyn.NilValue)
if err != nil {
return dyn.NilValue, err

View File

@@ -12,6 +12,7 @@ import (
"github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/dbr"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/require"
@@ -482,3 +483,59 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
})
}
}
func TestApplyPresetsPrefixForApps(t *testing.T) {
tests := []struct {
name string
prefix string
app *resources.App
want string
}{
{
name: "add prefix to app",
prefix: "[prefix] ",
app: &resources.App{
App: &apps.App{
Name: "app1",
},
},
want: "prefix-app1",
},
{
name: "add empty prefix to app",
prefix: "",
app: &resources.App{
App: &apps.App{
Name: "app1",
},
},
want: "app1",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Apps: map[string]*resources.App{
"app1": tt.app,
},
},
Presets: config.Presets{
NamePrefix: tt.prefix,
},
},
}
ctx := context.Background()
diag := bundle.Apply(ctx, b, mutator.ApplyPresets())
if diag.HasError() {
t.Fatalf("unexpected error: %v", diag)
}
require.Equal(t, tt.want, b.Config.Resources.Apps["app1"].Name)
})
}
}

View File

@@ -12,6 +12,11 @@ import (
)
type App struct {
// This represents the ID, which is the name of the app and can be used
// as a reference in other resources. This value is returned by Terraform.
// It equals the app name and is added for symmetry with other resources.
ID string `json:"id,omitempty" bundle:"readonly"`
// SourceCodePath is a required field used by DABs to point to Databricks app source code
// on local disk and to the corresponding workspace path during app deployment.
SourceCodePath string `json:"source_code_path"`
@@ -51,10 +56,10 @@ func (a *App) TerraformResourceName() string {
}
func (a *App) InitializeURL(baseURL url.URL) {
if a.Name == "" {
if a.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("apps/%s", a.Name)
baseURL.Path = fmt.Sprintf("apps/%s", a.ID)
a.URL = baseURL.String()
}

View File

@@ -9,7 +9,6 @@ import (
"github.com/databricks/cli/bundle/deploy/terraform/tfdyn"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/databricks-sdk-go/service/apps"
tfjson "github.com/hashicorp/terraform-json"
)
@@ -203,9 +202,9 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
}
cur := config.Resources.Apps[resource.Name]
if cur == nil {
cur = &resources.App{ModifiedStatus: resources.ModifiedStatusDeleted, App: &apps.App{}}
cur = &resources.App{ModifiedStatus: resources.ModifiedStatusDeleted}
}
cur.Name = instance.Attributes.Name
cur.ID = instance.Attributes.ID
config.Resources.Apps[resource.Name] = cur
case "databricks_permissions":
case "databricks_grants":
@@ -272,7 +271,7 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
}
}
for _, src := range config.Resources.Apps {
if src.ModifiedStatus == "" && src.Name == "" {
if src.ModifiedStatus == "" && src.ID == "" {
src.ModifiedStatus = resources.ModifiedStatusCreated
}
}

View File

@@ -700,7 +700,7 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
Mode: "managed",
Name: "test_app",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{Name: "app1"}},
{Attributes: stateInstanceAttributes{ID: "1"}},
},
},
},
@@ -741,7 +741,7 @@
assert.Equal(t, "1", config.Resources.Dashboards["test_dashboard"].ID)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
assert.Equal(t, "app1", config.Resources.Apps["test_app"].Name)
assert.Equal(t, "1", config.Resources.Apps["test_app"].ID)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Apps["test_app"].ModifiedStatus)
AssertFullResourceCoverage(t, &config)
@@ -830,7 +830,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
Apps: map[string]*resources.App{
"test_app": {
App: &apps.App{
Description: "test_app",
Name: "test_app",
},
},
},
@@ -875,7 +875,7 @@
assert.Equal(t, "", config.Resources.Dashboards["test_dashboard"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
assert.Equal(t, "", config.Resources.Apps["test_app"].Name)
assert.Equal(t, "", config.Resources.Apps["test_app"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Apps["test_app"].ModifiedStatus)
AssertFullResourceCoverage(t, &config)
@@ -1019,12 +1019,12 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
Apps: map[string]*resources.App{
"test_app": {
App: &apps.App{
Description: "test_app",
Name: "test_app",
},
},
"test_app_new": {
App: &apps.App{
Description: "test_app_new",
Name: "test_app_new",
},
},
},
@@ -1213,7 +1213,7 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
Mode: "managed",
Name: "test_app",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{Name: "app1"}},
{Attributes: stateInstanceAttributes{ID: "1"}},
},
},
{
@@ -1221,7 +1221,7 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
Mode: "managed",
Name: "test_app_old",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{Name: "app2"}},
{Attributes: stateInstanceAttributes{ID: "2"}},
},
},
},
@@ -1306,11 +1306,11 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
assert.Equal(t, "", config.Resources.Dashboards["test_dashboard_new"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard_new"].ModifiedStatus)
assert.Equal(t, "app1", config.Resources.Apps["test_app"].Name)
assert.Equal(t, "1", config.Resources.Apps["test_app"].ID)
assert.Equal(t, "", config.Resources.Apps["test_app"].ModifiedStatus)
assert.Equal(t, "app2", config.Resources.Apps["test_app_old"].Name)
assert.Equal(t, "2", config.Resources.Apps["test_app_old"].ID)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Apps["test_app_old"].ModifiedStatus)
assert.Equal(t, "", config.Resources.Apps["test_app_new"].Name)
assert.Equal(t, "", config.Resources.Apps["test_app_new"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Apps["test_app_new"].ModifiedStatus)
AssertFullResourceCoverage(t, &config)

View File

@@ -34,7 +34,6 @@ type stateResourceInstance struct {
type stateInstanceAttributes struct {
ID string `json:"id"`
Name string `json:"name,omitempty"`
ETag string `json:"etag,omitempty"`
}

View File

@@ -97,7 +97,7 @@ func TestParseResourcesStateWithExistingStateFile(t *testing.T) {
Type: "databricks_pipeline",
Name: "test_pipeline",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{ID: "123", Name: "test_pipeline"}},
{Attributes: stateInstanceAttributes{ID: "123"}},
},
},
},

View File

@@ -1,209 +0,0 @@
package main
import (
"bytes"
"fmt"
"os"
"reflect"
"regexp"
"strings"
yaml3 "gopkg.in/yaml.v3"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/dyn/yamlsaver"
"github.com/databricks/cli/libs/jsonschema"
)
type annotation struct {
Description string `json:"description,omitempty"`
MarkdownDescription string `json:"markdown_description,omitempty"`
Title string `json:"title,omitempty"`
Default any `json:"default,omitempty"`
Enum []any `json:"enum,omitempty"`
}
type annotationHandler struct {
// Annotations read from all annotation files including all overrides
parsedAnnotations annotationFile
// Missing annotations for fields that are found in config that need to be added to the annotation file
missingAnnotations annotationFile
}
/**
* Parsed file with annotations, expected format:
* github.com/databricks/cli/bundle/config.Bundle:
* cluster_id:
* description: "Description"
*/
type annotationFile map[string]map[string]annotation
const Placeholder = "PLACEHOLDER"
// Adds annotations to the JSON schema reading from the annotation files.
// More details https://json-schema.org/understanding-json-schema/reference/annotations
func newAnnotationHandler(sources []string) (*annotationHandler, error) {
prev := dyn.NilValue
for _, path := range sources {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
generated, err := yamlloader.LoadYAML(path, bytes.NewBuffer(b))
if err != nil {
return nil, err
}
prev, err = merge.Merge(prev, generated)
if err != nil {
return nil, err
}
}
var data annotationFile
err := convert.ToTyped(&data, prev)
if err != nil {
return nil, err
}
d := &annotationHandler{}
d.parsedAnnotations = data
d.missingAnnotations = annotationFile{}
return d, nil
}
func (d *annotationHandler) addAnnotations(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
refPath := getPath(typ)
shouldHandle := strings.HasPrefix(refPath, "github.com")
if !shouldHandle {
return s
}
annotations := d.parsedAnnotations[refPath]
if annotations == nil {
annotations = map[string]annotation{}
}
rootTypeAnnotation, ok := annotations[RootTypeKey]
if ok {
assignAnnotation(&s, rootTypeAnnotation)
}
for k, v := range s.Properties {
item := annotations[k]
if item.Description == "" {
item.Description = Placeholder
emptyAnnotations := d.missingAnnotations[refPath]
if emptyAnnotations == nil {
emptyAnnotations = map[string]annotation{}
d.missingAnnotations[refPath] = emptyAnnotations
}
emptyAnnotations[k] = item
}
assignAnnotation(v, item)
}
return s
}
// Writes missing annotations with placeholder values back to the annotation file
func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error {
existingFile, err := os.ReadFile(outputPath)
if err != nil {
return err
}
existing, err := yamlloader.LoadYAML("", bytes.NewBuffer(existingFile))
if err != nil {
return err
}
missingAnnotations, err := convert.FromTyped(&d.missingAnnotations, dyn.NilValue)
if err != nil {
return err
}
output, err := merge.Merge(existing, missingAnnotations)
if err != nil {
return err
}
err = saveYamlWithStyle(outputPath, output)
if err != nil {
return err
}
return nil
}
func getPath(typ reflect.Type) string {
return typ.PkgPath() + "." + typ.Name()
}
func assignAnnotation(s *jsonschema.Schema, a annotation) {
if a.Description != Placeholder {
s.Description = a.Description
}
if a.Default != nil {
s.Default = a.Default
}
s.MarkdownDescription = convertLinksToAbsoluteUrl(a.MarkdownDescription)
s.Title = a.Title
s.Enum = a.Enum
}
func saveYamlWithStyle(outputPath string, input dyn.Value) error {
style := map[string]yaml3.Style{}
file, _ := input.AsMap()
for _, v := range file.Keys() {
style[v.MustString()] = yaml3.LiteralStyle
}
saver := yamlsaver.NewSaverWithStyle(style)
err := saver.SaveAsYAML(file, outputPath, true)
if err != nil {
return err
}
return nil
}
func convertLinksToAbsoluteUrl(s string) string {
if s == "" {
return s
}
base := "https://docs.databricks.com"
referencePage := "/dev-tools/bundles/reference.html"
// Regular expression to match Markdown-style links like [_](link)
re := regexp.MustCompile(`\[_\]\(([^)]+)\)`)
result := re.ReplaceAllStringFunc(s, func(match string) string {
matches := re.FindStringSubmatch(match)
if len(matches) < 2 {
return match
}
link := matches[1]
var text, absoluteURL string
if strings.HasPrefix(link, "#") {
text = strings.TrimPrefix(link, "#")
absoluteURL = fmt.Sprintf("%s%s%s", base, referencePage, link)
// Handle relative paths like /dev-tools/bundles/resources.html#dashboard
} else if strings.HasPrefix(link, "/") {
absoluteURL = strings.ReplaceAll(fmt.Sprintf("%s%s", base, link), ".md", ".html")
if strings.Contains(link, "#") {
parts := strings.Split(link, "#")
text = parts[1]
} else {
text = "link"
}
} else {
return match
}
return fmt.Sprintf("[%s](%s)", text, absoluteURL)
})
return result
}
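
Editor's note: a short usage sketch for convertLinksToAbsoluteUrl as defined above; the expected outputs match the tests further down in this diff.

// Hypothetical call site:
s := convertLinksToAbsoluteUrl("See [_](#job) and [_](/dev-tools/bundles/resources.md#dashboard).")
// s == "See [job](https://docs.databricks.com/dev-tools/bundles/reference.html#job)
//       and [dashboard](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboard)."

Anchor-only links resolve against the reference page; rooted paths keep their own page, with .md rewritten to .html.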

View File

@@ -1,662 +0,0 @@
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.App:
"create_time":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"resources":
"description": |-
PLACEHOLDER
"url":
"description": |-
PLACEHOLDER
"active_deployment":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"default_source_code_path":
"description": |-
PLACEHOLDER
"service_principal_client_id":
"description": |-
PLACEHOLDER
"service_principal_name":
"description": |-
PLACEHOLDER
"config":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"service_principal_id":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"compute_status":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"app_status":
"description": |-
PLACEHOLDER
"pending_deployment":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
"updater":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResource:
"name":
"description": |-
PLACEHOLDER
"secret":
"description": |-
PLACEHOLDER
"serving_endpoint":
"description": |-
PLACEHOLDER
"sql_warehouse":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"job":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"state":
"description": |-
PLACEHOLDER
"message":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"permission":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config.Artifact:
"build":
"description": |-
An optional set of non-default build commands that you want to run locally before deployment.
For Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment.
To specify multiple build commands, separate each command with double-ampersand (&&) characters.
"executable":
"description": |-
The executable type.
"files":
"description": |-
The source files for the artifact.
"markdown_description": |-
The source files for the artifact, defined as an [_](#artifact_file).
"path":
"description": |-
The location where the built artifact will be saved.
"type":
"description": |-
The type of the artifact.
"markdown_description": |-
The type of the artifact. Valid values are `wheel` or `jar`
github.com/databricks/cli/bundle/config.ArtifactFile:
"source":
"description": |-
The path of the files used to build the artifact.
github.com/databricks/cli/bundle/config.Bundle:
"cluster_id":
"description": |-
The ID of a cluster to use to run the bundle.
"markdown_description": |-
The ID of a cluster to use to run the bundle. See [_](/dev-tools/bundles/settings.md#cluster_id).
"compute_id":
"description": |-
PLACEHOLDER
"databricks_cli_version":
"description": |-
The Databricks CLI version to use for the bundle.
"markdown_description": |-
The Databricks CLI version to use for the bundle. See [_](/dev-tools/bundles/settings.md#databricks_cli_version).
"deployment":
"description": |-
The definition of the bundle deployment
"markdown_description": |-
The definition of the bundle deployment. For supported attributes, see [_](#deployment) and [_](/dev-tools/bundles/deployment-modes.md).
"git":
"description": |-
The Git version control details that are associated with your bundle.
"markdown_description": |-
The Git version control details that are associated with your bundle. For supported attributes, see [_](#git) and [_](/dev-tools/bundles/settings.md#git).
"name":
"description": |-
The name of the bundle.
"uuid":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config.Deployment:
"fail_on_active_runs":
"description": |-
Whether to fail on active runs. If this is set to true a deployment that is running can be interrupted.
"lock":
"description": |-
The deployment lock attributes.
"markdown_description": |-
The deployment lock attributes. See [_](#lock).
github.com/databricks/cli/bundle/config.Experimental:
"pydabs":
"description": |-
The PyDABs configuration.
"python_wheel_wrapper":
"description": |-
Whether to use a Python wheel wrapper
"scripts":
"description": |-
The commands to run
"use_legacy_run_as":
"description": |-
Whether to use the legacy run_as behavior
github.com/databricks/cli/bundle/config.Git:
"branch":
"description": |-
The Git branch name.
"markdown_description": |-
The Git branch name. See [_](/dev-tools/bundles/settings.md#git).
"origin_url":
"description": |-
The origin URL of the repository.
"markdown_description": |-
The origin URL of the repository. See [_](/dev-tools/bundles/settings.md#git).
github.com/databricks/cli/bundle/config.Lock:
"enabled":
"description": |-
Whether this lock is enabled.
"force":
"description": |-
Whether to force this lock if it is enabled.
github.com/databricks/cli/bundle/config.Presets:
"jobs_max_concurrent_runs":
"description": |-
The maximum concurrent runs for a job.
"name_prefix":
"description": |-
The prefix for job runs of the bundle.
"pipelines_development":
"description": |-
Whether pipeline deployments should be locked in development mode.
"source_linked_deployment":
"description": |-
Whether to link the deployment to the bundle source.
"tags":
"description": |-
The tags for the bundle deployment.
"trigger_pause_status":
"description": |-
A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED.
github.com/databricks/cli/bundle/config.PyDABs:
"enabled":
"description": |-
Whether or not PyDABs (Private Preview) is enabled
"import":
"description": |-
The PyDABs project to import to discover resources, resource generator and mutators
"venv_path":
"description": |-
The Python virtual environment path
github.com/databricks/cli/bundle/config.Resources:
"apps":
"description": |-
PLACEHOLDER
"clusters":
"description": |-
The cluster definitions for the bundle.
"markdown_description": |-
The cluster definitions for the bundle. See [_](/dev-tools/bundles/resources.md#cluster)
"dashboards":
"description": |-
The dashboard definitions for the bundle.
"markdown_description": |-
The dashboard definitions for the bundle. See [_](/dev-tools/bundles/resources.md#dashboard)
"experiments":
"description": |-
The experiment definitions for the bundle.
"markdown_description": |-
The experiment definitions for the bundle. See [_](/dev-tools/bundles/resources.md#experiment)
"jobs":
"description": |-
The job definitions for the bundle.
"markdown_description": |-
The job definitions for the bundle. See [_](/dev-tools/bundles/resources.md#job)
"model_serving_endpoints":
"description": |-
The model serving endpoint definitions for the bundle.
"markdown_description": |-
The model serving endpoint definitions for the bundle. See [_](/dev-tools/bundles/resources.md#model_serving_endpoint)
"models":
"description": |-
The model definitions for the bundle.
"markdown_description": |-
The model definitions for the bundle. See [_](/dev-tools/bundles/resources.md#model)
"pipelines":
"description": |-
The pipeline definitions for the bundle.
"markdown_description": |-
The pipeline definitions for the bundle. See [_](/dev-tools/bundles/resources.md#pipeline)
"quality_monitors":
"description": |-
The quality monitor definitions for the bundle.
"markdown_description": |-
The quality monitor definitions for the bundle. See [_](/dev-tools/bundles/resources.md#quality_monitor)
"registered_models":
"description": |-
The registered model definitions for the bundle.
"markdown_description": |-
The registered model definitions for the bundle. See [_](/dev-tools/bundles/resources.md#registered_model)
"schemas":
"description": |-
The schema definitions for the bundle.
"markdown_description": |-
The schema definitions for the bundle. See [_](/dev-tools/bundles/resources.md#schema)
"volumes":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config.Root:
"artifacts":
"description": |-
Defines the attributes to build an artifact
"bundle":
"description": |-
The attributes of the bundle.
"markdown_description": |-
The attributes of the bundle. See [_](/dev-tools/bundles/settings.md#bundle)
"experimental":
"description": |-
Defines attributes for experimental features.
"include":
"description": |-
Specifies a list of path globs that contain configuration files to include within the bundle.
"markdown_description": |-
Specifies a list of path globs that contain configuration files to include within the bundle. See [_](/dev-tools/bundles/settings.md#include)
"permissions":
"description": |-
Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle
"markdown_description": |-
Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle. See [_](/dev-tools/bundles/settings.md#permissions) and [_](/dev-tools/bundles/permissions.md).
"presets":
"description": |-
Defines bundle deployment presets.
"markdown_description": |-
Defines bundle deployment presets. See [_](/dev-tools/bundles/deployment-modes.md#presets).
"resources":
"description": |-
Specifies information about the Databricks resources used by the bundle
"markdown_description": |-
Specifies information about the Databricks resources used by the bundle. See [_](/dev-tools/bundles/resources.md).
"run_as":
"description": |-
The identity to use to run the bundle.
"sync":
"description": |-
The files and file paths to include or exclude in the bundle.
"markdown_description": |-
The files and file paths to include or exclude in the bundle. See [_](/dev-tools/bundles/)
"targets":
"description": |-
Defines deployment targets for the bundle.
"variables":
"description": |-
A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.
"workspace":
"description": |-
Defines the Databricks workspace for the bundle.
github.com/databricks/cli/bundle/config.Sync:
"exclude":
"description": |-
A list of files or folders to exclude from the bundle.
"include":
"description": |-
A list of files or folders to include in the bundle.
"paths":
"description": |-
The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed.
github.com/databricks/cli/bundle/config.Target:
"artifacts":
"description": |-
The artifacts to include in the target deployment.
"markdown_description": |-
The artifacts to include in the target deployment. See [_](#artifact)
"bundle":
"description": |-
The name of the bundle when deploying to this target.
"cluster_id":
"description": |-
The ID of the cluster to use for this target.
"compute_id":
"description": |-
Deprecated. The ID of the compute to use for this target.
"default":
"description": |-
Whether this target is the default target.
"git":
"description": |-
The Git version control settings for the target.
"markdown_description": |-
The Git version control settings for the target. See [_](#git).
"mode":
"description": |-
The deployment mode for the target.
"markdown_description": |-
The deployment mode for the target. Valid values are `development` or `production`. See [_](/dev-tools/bundles/deployment-modes.md).
"permissions":
"description": |-
The permissions for deploying and running the bundle in the target.
"markdown_description": |-
The permissions for deploying and running the bundle in the target. See [_](#permission).
"presets":
"description": |-
The deployment presets for the target.
"markdown_description": |-
The deployment presets for the target. See [_](#preset).
"resources":
"description": |-
The resource definitions for the target.
"markdown_description": |-
The resource definitions for the target. See [_](#resources).
"run_as":
"description": |-
The identity to use to run the bundle.
"markdown_description": |-
The identity to use to run the bundle. See [_](#job_run_as) and [_](/dev-tools/bundles/run_as.md).
"sync":
"description": |-
The local paths to sync to the target workspace when a bundle is run or deployed.
"markdown_description": |-
The local paths to sync to the target workspace when a bundle is run or deployed. See [_](#sync).
"variables":
"description": |-
The custom variable definitions for the target.
"markdown_description": |-
The custom variable definitions for the target. See [_](/dev-tools/bundles/settings.md#variables) and [_](/dev-tools/bundles/variables.md).
"workspace":
"description": |-
The Databricks workspace for the target.
"markdown_description": |-
The Databricks workspace for the target. [_](#workspace)
github.com/databricks/cli/bundle/config.Workspace:
"artifact_path":
"description": |-
The artifact path to use within the workspace for both deployments and workflow runs
"auth_type":
"description": |-
The authentication type.
"azure_client_id":
"description": |-
The Azure client ID
"azure_environment":
"description": |-
The Azure environment
"azure_login_app_id":
"description": |-
The Azure login app ID
"azure_tenant_id":
"description": |-
The Azure tenant ID
"azure_use_msi":
"description": |-
Whether to use MSI for Azure
"azure_workspace_resource_id":
"description": |-
The Azure workspace resource ID
"client_id":
"description": |-
The client ID for the workspace
"file_path":
"description": |-
The file path to use within the workspace for both deployments and workflow runs
"google_service_account":
"description": |-
The Google service account name
"host":
"description": |-
The Databricks workspace host URL
"profile":
"description": |-
The Databricks workspace profile name
"resource_path":
"description": |-
The workspace resource path
"root_path":
"description": |-
The Databricks workspace root path
"state_path":
"description": |-
The workspace state path
github.com/databricks/cli/bundle/config/resources.Grant:
"principal":
"description": |-
The name of the principal that will be granted privileges
"privileges":
"description": |-
The privileges to grant to the specified entity
github.com/databricks/cli/bundle/config/resources.Permission:
"group_name":
"description": |-
The name of the group that has the permission set in level.
"level":
"description": |-
The allowed permission for user, group, service principal defined for this permission.
"service_principal_name":
"description": |-
The name of the service principal that has the permission set in level.
"user_name":
"description": |-
The name of the user that has the permission set in level.
github.com/databricks/cli/bundle/config/variable.Lookup:
"alert":
"description": |-
PLACEHOLDER
"cluster":
"description": |-
PLACEHOLDER
"cluster_policy":
"description": |-
PLACEHOLDER
"dashboard":
"description": |-
PLACEHOLDER
"instance_pool":
"description": |-
PLACEHOLDER
"job":
"description": |-
PLACEHOLDER
"metastore":
"description": |-
PLACEHOLDER
"notification_destination":
"description": |-
PLACEHOLDER
"pipeline":
"description": |-
PLACEHOLDER
"query":
"description": |-
PLACEHOLDER
"service_principal":
"description": |-
PLACEHOLDER
"warehouse":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/variable.TargetVariable:
"default":
"description": |-
PLACEHOLDER
"description":
"description": |-
The description of the variable.
"lookup":
"description": |-
The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
"type":
"description": |-
The type of the variable.
"markdown_description":
"description": |-
The type of the variable.
github.com/databricks/cli/bundle/config/variable.Variable:
"default":
"description": |-
PLACEHOLDER
"description":
"description": |-
The description of the variable
"lookup":
"description": |-
The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
"markdown_description": |-
The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.
"type":
"description": |-
The type of the variable.
github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
"ai21labs_api_key":
"description": |-
PLACEHOLDER
"ai21labs_api_key_plaintext":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig:
"private_key":
"description": |-
PLACEHOLDER
"private_key_plaintext":
"description": |-
PLACEHOLDER
"project_id":
"description": |-
PLACEHOLDER
"region":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig:
"microsoft_entra_client_id":
"description": |-
PLACEHOLDER
"microsoft_entra_client_secret":
"description": |-
PLACEHOLDER
"microsoft_entra_client_secret_plaintext":
"description": |-
PLACEHOLDER
"microsoft_entra_tenant_id":
"description": |-
PLACEHOLDER
"openai_api_base":
"description": |-
PLACEHOLDER
"openai_api_key":
"description": |-
PLACEHOLDER
"openai_api_key_plaintext":
"description": |-
PLACEHOLDER
"openai_api_type":
"description": |-
PLACEHOLDER
"openai_api_version":
"description": |-
PLACEHOLDER
"openai_deployment_name":
"description": |-
PLACEHOLDER
"openai_organization":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig:
"palm_api_key":
"description": |-
PLACEHOLDER
"palm_api_key_plaintext":
"description": |-
PLACEHOLDER

File diff suppressed because it is too large

View File

@@ -1,155 +0,0 @@
github.com/databricks/cli/bundle/config/resources.Cluster:
"data_security_mode":
"description": |-
PLACEHOLDER
"docker_image":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"runtime_engine":
"description": |-
PLACEHOLDER
"workload_type":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Dashboard:
"embed_credentials":
"description": |-
PLACEHOLDER
"file_path":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Job:
"health":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"run_as":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowExperiment:
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowModel:
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Pipeline:
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.QualityMonitor:
"table_name":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.RegisteredModel:
"grants":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Schema:
"grants":
"description": |-
PLACEHOLDER
"properties":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Volume:
"grants":
"description": |-
PLACEHOLDER
"volume_type":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes:
"availability":
"description": |-
PLACEHOLDER
"ebs_volume_type":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes:
"availability":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec:
"data_security_mode":
"description": |-
PLACEHOLDER
"docker_image":
"description": |-
PLACEHOLDER
"runtime_engine":
"description": |-
PLACEHOLDER
"workload_type":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.DockerImage:
"basic_auth":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes:
"availability":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.GitSource:
"git_snapshot":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment:
"spec":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule:
"metric":
"description": |-
PLACEHOLDER
"op":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules:
"rules":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask:
"python_named_params":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Task:
"health":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings:
"table_update":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Webhook:
"id":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger:
"quartz_cron_schedule":
"description": |-
PLACEHOLDER
"timezone_id":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
"cron":
"description": |-
PLACEHOLDER
"manual":
"description": |-
PLACEHOLDER

View File

@@ -1,44 +0,0 @@
package main
import (
"testing"
)
func TestConvertLinksToAbsoluteUrl(t *testing.T) {
tests := []struct {
input string
expected string
}{
{
input: "",
expected: "",
},
{
input: "Some text (not a link)",
expected: "Some text (not a link)",
},
{
input: "This is a link to [_](#section)",
expected: "This is a link to [section](https://docs.databricks.com/dev-tools/bundles/reference.html#section)",
},
{
input: "This is a link to [_](/dev-tools/bundles/resources.html#dashboard)",
expected: "This is a link to [dashboard](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboard)",
},
{
input: "This is a link to [_](/dev-tools/bundles/resources.html)",
expected: "This is a link to [link](https://docs.databricks.com/dev-tools/bundles/resources.html)",
},
{
input: "This is a link to [external](https://external.com)",
expected: "This is a link to [external](https://external.com)",
},
}
for _, test := range tests {
result := convertLinksToAbsoluteUrl(test.input)
if result != test.expected {
t.Errorf("For input '%s', expected '%s', but got '%s'", test.input, test.expected, result)
}
}
}

View File

@@ -5,7 +5,6 @@ import (
"fmt"
"log"
"os"
"path/filepath"
"reflect"
"github.com/databricks/cli/bundle/config"
@@ -44,8 +43,7 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.
case jsonschema.ArrayType, jsonschema.ObjectType:
// arrays and objects can have complex variable values specified.
return jsonschema.Schema{
// OneOf is used because we don't expect more than 1 match and schema-based auto-complete works better with OneOf
OneOf: []jsonschema.Schema{
AnyOf: []jsonschema.Schema{
s,
{
Type: jsonschema.StringType,
@@ -57,7 +55,7 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.
// primitives can have variable values, or references like ${bundle.xyz}
// or ${workspace.xyz}
return jsonschema.Schema{
OneOf: []jsonschema.Schema{
AnyOf: []jsonschema.Schema{
s,
{Type: jsonschema.StringType, Pattern: interpolationPattern("resources")},
{Type: jsonschema.StringType, Pattern: interpolationPattern("bundle")},
@@ -115,60 +113,37 @@ func makeVolumeTypeOptional(typ reflect.Type, s jsonschema.Schema) jsonschema.Sc
}
func main() {
if len(os.Args) != 3 {
fmt.Println("Usage: go run main.go <work-dir> <output-file>")
if len(os.Args) != 2 {
fmt.Println("Usage: go run main.go <output-file>")
os.Exit(1)
}
// Directory with annotation files
workdir := os.Args[1]
// Output file, where the generated JSON schema will be written to.
outputFile := os.Args[2]
generateSchema(workdir, outputFile)
}
func generateSchema(workdir, outputFile string) {
annotationsPath := filepath.Join(workdir, "annotations.yml")
annotationsOpenApiPath := filepath.Join(workdir, "annotations_openapi.yml")
annotationsOpenApiOverridesPath := filepath.Join(workdir, "annotations_openapi_overrides.yml")
outputFile := os.Args[1]
// Input file, the databricks openapi spec.
inputFile := os.Getenv("DATABRICKS_OPENAPI_SPEC")
if inputFile != "" {
p, err := newParser(inputFile)
if err != nil {
log.Fatal(err)
}
fmt.Printf("Writing OpenAPI annotations to %s\n", annotationsOpenApiPath)
err = p.extractAnnotations(reflect.TypeOf(config.Root{}), annotationsOpenApiPath, annotationsOpenApiOverridesPath)
if err != nil {
log.Fatal(err)
}
if inputFile == "" {
log.Fatal("DATABRICKS_OPENAPI_SPEC environment variable not set")
}
a, err := newAnnotationHandler([]string{annotationsOpenApiPath, annotationsOpenApiOverridesPath, annotationsPath})
p, err := newParser(inputFile)
if err != nil {
log.Fatal(err)
}
// Generate the JSON schema from the bundle Go struct.
s, err := jsonschema.FromType(reflect.TypeOf(config.Root{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
p.addDescriptions,
p.addEnums,
removeJobsFields,
makeVolumeTypeOptional,
a.addAnnotations,
addInterpolationPatterns,
})
if err != nil {
log.Fatal(err)
}
// Overwrite the input annotation file, adding missing annotations
err = a.syncWithMissingAnnotations(annotationsPath)
if err != nil {
log.Fatal(err)
}
b, err := json.MarshalIndent(s, "", " ")
if err != nil {
log.Fatal(err)
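
Editor's note: both versions of main build the bundle schema by threading config.Root through a list of transformer functions of type func(reflect.Type, jsonschema.Schema) jsonschema.Schema. A stripped-down sketch of that visitor pattern, using simplified stand-in types rather than the CLI's jsonschema package:

package main

import (
	"fmt"
	"reflect"
)

// Schema is a minimal stand-in for jsonschema.Schema.
type Schema struct {
	Description string
	Enum        []any
}

type visitor func(reflect.Type, Schema) Schema

// fromType mimics jsonschema.FromType: each visitor receives the schema
// produced so far and returns a (possibly) modified copy.
func fromType(typ reflect.Type, visitors []visitor) Schema {
	var s Schema
	for _, v := range visitors {
		s = v(typ, s)
	}
	return s
}

func main() {
	addDescription := func(t reflect.Type, s Schema) Schema {
		s.Description = "schema for " + t.Name()
		return s
	}
	addEnum := func(t reflect.Type, s Schema) Schema {
		s.Enum = append(s.Enum, "example")
		return s
	}
	type Root struct{}
	fmt.Printf("%+v\n", fromType(reflect.TypeOf(Root{}), []visitor{addDescription, addEnum}))
}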

View File

@@ -1,125 +0,0 @@
package main
import (
"bytes"
"fmt"
"io"
"os"
"path"
"reflect"
"strings"
"testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/jsonschema"
"github.com/ghodss/yaml"
"github.com/stretchr/testify/assert"
)
func copyFile(src, dst string) error {
in, err := os.Open(src)
if err != nil {
return err
}
defer in.Close()
out, err := os.Create(dst)
if err != nil {
return err
}
defer out.Close()
_, err = io.Copy(out, in)
if err != nil {
return err
}
return out.Close()
}
// Checks whether descriptions are added for new config fields in the annotations.yml file.
// If this test fails, either manually add descriptions to `annotations.yml` or do the following:
// 1. run `make schema` from the repository root to add placeholder descriptions
// 2. replace all "PLACEHOLDER" values with the actual descriptions if possible
// 3. run `make schema` again to regenerate the schema with actual descriptions
func TestRequiredAnnotationsForNewFields(t *testing.T) {
workdir := t.TempDir()
annotationsPath := path.Join(workdir, "annotations.yml")
annotationsOpenApiPath := path.Join(workdir, "annotations_openapi.yml")
annotationsOpenApiOverridesPath := path.Join(workdir, "annotations_openapi_overrides.yml")
// Copy existing annotation files from the same folder as this test
err := copyFile("annotations.yml", annotationsPath)
assert.NoError(t, err)
err = copyFile("annotations_openapi.yml", annotationsOpenApiPath)
assert.NoError(t, err)
err = copyFile("annotations_openapi_overrides.yml", annotationsOpenApiOverridesPath)
assert.NoError(t, err)
generateSchema(workdir, path.Join(t.TempDir(), "schema.json"))
originalFile, err := os.ReadFile("annotations.yml")
assert.NoError(t, err)
currentFile, err := os.ReadFile(annotationsPath)
assert.NoError(t, err)
original, err := yamlloader.LoadYAML("", bytes.NewBuffer(originalFile))
assert.NoError(t, err)
current, err := yamlloader.LoadYAML("", bytes.NewBuffer(currentFile))
assert.NoError(t, err)
// Collect added paths.
var updatedFieldPaths []string
_, err = merge.Override(original, current, merge.OverrideVisitor{
VisitInsert: func(basePath dyn.Path, right dyn.Value) (dyn.Value, error) {
updatedFieldPaths = append(updatedFieldPaths, basePath.String())
return right, nil
},
})
assert.NoError(t, err)
assert.Empty(t, updatedFieldPaths, fmt.Sprintf("Missing JSON-schema descriptions for new config fields in bundle/internal/schema/annotations.yml:\n%s", strings.Join(updatedFieldPaths, "\n")))
}
// Checks whether types in annotation files are still present in Config type
func TestNoDetachedAnnotations(t *testing.T) {
files := []string{
"annotations.yml",
"annotations_openapi.yml",
"annotations_openapi_overrides.yml",
}
types := map[string]bool{}
for _, file := range files {
annotations, err := getAnnotations(file)
assert.NoError(t, err)
for k := range annotations {
types[k] = false
}
}
_, err := jsonschema.FromType(reflect.TypeOf(config.Root{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
delete(types, getPath(typ))
return s
},
})
assert.NoError(t, err)
for typ := range types {
t.Errorf("Type `%s` in annotations file is not found in `root.Config` type", typ)
}
assert.Empty(t, types, "Detached annotations found, regenerate schema and check for package path changes")
}
func getAnnotations(path string) (annotationFile, error) {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
var data annotationFile
err = yaml.Unmarshal(b, &data)
return data, err
}

View File

@@ -1,7 +1,6 @@
package main
import (
"bytes"
"encoding/json"
"fmt"
"os"
@@ -9,9 +8,6 @@ import (
"reflect"
"strings"
"github.com/ghodss/yaml"
"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/jsonschema"
)
@@ -27,8 +23,6 @@ type openapiParser struct {
ref map[string]jsonschema.Schema
}
const RootTypeKey = "_"
func newParser(path string) (*openapiParser, error) {
b, err := os.ReadFile(path)
if err != nil {
@@ -95,95 +89,35 @@ func (p *openapiParser) findRef(typ reflect.Type) (jsonschema.Schema, bool) {
}
// Use the OpenAPI spec to load descriptions for the given type.
func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overridesPath string) error {
annotations := annotationFile{}
overrides := annotationFile{}
b, err := os.ReadFile(overridesPath)
if err != nil {
return err
}
err = yaml.Unmarshal(b, &overrides)
if err != nil {
return err
}
if overrides == nil {
overrides = annotationFile{}
}
_, err = jsonschema.FromType(typ, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
ref, ok := p.findRef(typ)
if !ok {
return s
}
basePath := getPath(typ)
pkg := map[string]annotation{}
annotations[basePath] = pkg
if ref.Description != "" || ref.Enum != nil {
pkg[RootTypeKey] = annotation{Description: ref.Description, Enum: ref.Enum}
}
for k := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
pkg[k] = annotation{Description: refProp.Description, Enum: refProp.Enum}
if refProp.Description == "" {
addEmptyOverride(k, basePath, overrides)
}
} else {
addEmptyOverride(k, basePath, overrides)
}
}
return s
},
})
if err != nil {
return err
}
b, err = yaml.Marshal(overrides)
if err != nil {
return err
}
o, err := yamlloader.LoadYAML("", bytes.NewBuffer(b))
if err != nil {
return err
}
err = saveYamlWithStyle(overridesPath, o)
if err != nil {
return err
}
b, err = yaml.Marshal(annotations)
if err != nil {
return err
}
b = bytes.Join([][]byte{[]byte("# This file is auto-generated. DO NOT EDIT."), b}, []byte("\n"))
err = os.WriteFile(outputPath, b, 0o644)
if err != nil {
return err
}
return nil
}
func addEmptyOverride(key, pkg string, overridesFile annotationFile) {
if overridesFile[pkg] == nil {
overridesFile[pkg] = map[string]annotation{}
}
overrides := overridesFile[pkg]
if overrides[key].Description == "" {
overrides[key] = annotation{Description: Placeholder}
}
a, ok := overrides[key]
func (p *openapiParser) addDescriptions(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
ref, ok := p.findRef(typ)
if !ok {
a = annotation{}
return s
}
if a.Description == "" {
a.Description = Placeholder
s.Description = ref.Description
for k, v := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
v.Description = refProp.Description
}
}
overrides[key] = a
return s
}
// Use the OpenAPI spec to add enum values for the given type.
func (p *openapiParser) addEnums(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
ref, ok := p.findRef(typ)
if !ok {
return s
}
s.Enum = append(s.Enum, ref.Enum...)
for k, v := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
v.Enum = append(v.Enum, refProp.Enum...)
}
}
return s
}

View File

@@ -1,5 +0,0 @@
targets:
production:
variables:
myvar:
default: true

View File

@@ -41,21 +41,21 @@ func TestJsonSchema(t *testing.T) {
resourceJob := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Job")
fields := []string{"name", "continuous", "tasks", "trigger"}
for _, field := range fields {
assert.NotEmpty(t, resourceJob.OneOf[0].Properties[field].Description)
assert.NotEmpty(t, resourceJob.AnyOf[0].Properties[field].Description)
}
// Assert descriptions were also loaded for a job task definition.
jobTask := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.Task")
fields = []string{"notebook_task", "spark_jar_task", "spark_python_task", "spark_submit_task", "description", "depends_on", "environment_key", "for_each_task", "existing_cluster_id"}
for _, field := range fields {
assert.NotEmpty(t, jobTask.OneOf[0].Properties[field].Description)
assert.NotEmpty(t, jobTask.AnyOf[0].Properties[field].Description)
}
// Assert descriptions are loaded for pipelines
pipeline := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Pipeline")
fields = []string{"name", "catalog", "clusters", "channel", "continuous", "development"}
for _, field := range fields {
assert.NotEmpty(t, pipeline.OneOf[0].Properties[field].Description)
assert.NotEmpty(t, pipeline.AnyOf[0].Properties[field].Description)
}
providers := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.GitProvider")

File diff suppressed because it is too large

View File

@ -6,6 +6,7 @@ import (
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"github.com/databricks/cli/bundle/config/generate"
@ -36,8 +37,13 @@ func NewGenerateAppCommand() *cobra.Command {
cmd.Flags().StringVar(&appName, "existing-app-name", "", `App name to generate config for`)
cmd.MarkFlagRequired("existing-app-name")
cmd.Flags().StringVarP(&configDir, "config-dir", "d", filepath.Join("resources"), `Directory path where the output bundle config will be stored`)
cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", filepath.Join("src", "app"), `Directory path where the app files will be stored`)
wd, err := os.Getwd()
if err != nil {
wd = "."
}
cmd.Flags().StringVarP(&configDir, "config-dir", "d", filepath.Join(wd, "resources"), `Directory path where the output bundle config will be stored`)
cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", filepath.Join(wd, "src", "app"), `Directory path where the app files will be stored`)
cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`)
cmd.RunE = func(cmd *cobra.Command, args []string) error {
@ -54,35 +60,17 @@ func NewGenerateAppCommand() *cobra.Command {
return err
}
// Making sure the config directory and source directory are absolute paths.
if !filepath.IsAbs(configDir) {
configDir = filepath.Join(b.BundleRootPath, configDir)
}
if !filepath.IsAbs(sourceDir) {
sourceDir = filepath.Join(b.BundleRootPath, sourceDir)
}
downloader := newDownloader(w, sourceDir, configDir)
sourceCodePath := app.DefaultSourceCodePath
err = downloader.markDirectoryForDownload(ctx, &sourceCodePath)
if err != nil {
return err
}
downloader.markDirectoryForDownload(ctx, &sourceCodePath)
appConfig, err := getAppConfig(ctx, app, w)
if err != nil {
return fmt.Errorf("failed to get app config: %w", err)
}
// Making sure the source code path is relative to the config directory.
rel, err := filepath.Rel(configDir, sourceDir)
if err != nil {
return err
}
v, err := generate.ConvertAppToValue(app, filepath.ToSlash(rel), appConfig)
v, err := generate.ConvertAppToValue(app, sourceCodePath, appConfig)
if err != nil {
return err
}
@ -126,7 +114,7 @@ func NewGenerateAppCommand() *cobra.Command {
return cmd
}
func getAppConfig(ctx context.Context, app *apps.App, w *databricks.WorkspaceClient) (map[string]any, error) {
func getAppConfig(ctx context.Context, app *apps.App, w *databricks.WorkspaceClient) (map[string]interface{}, error) {
sourceCodePath := app.DefaultSourceCodePath
f, err := filer.NewWorkspaceFilesClient(w, sourceCodePath)
@ -152,7 +140,7 @@ func getAppConfig(ctx context.Context, app *apps.App, w *databricks.WorkspaceCli
return nil, err
}
var appConfig map[string]any
var appConfig map[string]interface{}
err = yaml.Unmarshal(content, &appConfig)
if err != nil {
cmdio.LogString(ctx, fmt.Sprintf("Failed to parse app configuration:\n%s\nerr: %v", string(content), err))

View File

@ -4,7 +4,7 @@ import (
"encoding/base64"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"

View File

@ -5,7 +5,7 @@ import (
"io"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
"github.com/databricks/databricks-sdk-go/service/apps"
@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/require"
)
func TestDeployBundleWithApp(t *testing.T) {
func TestAccDeployBundleWithApp(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
uniqueId := uuid.New().String()
appId := fmt.Sprintf("app-%s", uuid.New().String()[0:8])
@ -70,7 +70,8 @@ env:
value: "%d"`, job.JobId))
// Try to run the app
_, out := runResourceWithStderr(t, ctx, root, "test_app")
_, out, err := runResourceWithStderr(t, ctx, root, "test_app")
require.NoError(t, err)
require.Contains(t, out, app.Url)
// App should be in the running state
@ -88,7 +89,8 @@ env:
require.Equal(t, apps.ApplicationStateUnavailable, app.AppStatus.State)
// Try to run the app again
_, out = runResourceWithStderr(t, ctx, root, "test_app")
_, out, err = runResourceWithStderr(t, ctx, root, "test_app")
require.NoError(t, err)
require.Contains(t, out, app.Url)
// App should be in the running state

View File

@ -12,7 +12,7 @@ import (
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"

View File

@ -5,7 +5,7 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/google/uuid"
"github.com/stretchr/testify/require"

View File

@ -6,7 +6,7 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"

View File

@ -4,7 +4,7 @@ import (
"fmt"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/google/uuid"
@ -12,12 +12,12 @@ import (
)
func TestDeployBundleWithCluster(t *testing.T) {
if testutil.GetCloud(t) == testutil.AWS {
ctx, wt := acc.WorkspaceTest(t)
if testutil.IsAWSCloud(wt) {
t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
}
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root := initTestTemplate(t, ctx, "clusters", map[string]any{
@ -44,11 +44,6 @@ func TestDeployBundleWithCluster(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, cluster)
if testing.Short() {
t.Log("Skip the job run in short mode")
return
}
out, err := runResource(t, ctx, root, "foo")
require.NoError(t, err)
require.Contains(t, out, "Hello World!")

View File

@ -4,7 +4,7 @@ import (
"fmt"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/databricks/databricks-sdk-go/service/workspace"

View File

@ -11,7 +11,7 @@ import (
"testing"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"

View File

@ -5,7 +5,7 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"

View File

@ -4,7 +4,7 @@ import (
"fmt"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/google/uuid"
"github.com/stretchr/testify/require"

View File

@ -7,7 +7,7 @@ import (
"testing"
"github.com/databricks/cli/bundle/deploy"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
"github.com/google/uuid"

View File

@ -6,7 +6,7 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/google/uuid"

View File

@ -6,7 +6,7 @@ import (
"path/filepath"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)

View File

@ -3,7 +3,7 @@ package bundle_test
import (
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)

View File

@ -9,7 +9,7 @@ import (
"strings"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"

View File

@ -9,7 +9,7 @@ import (
"strings"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"

View File

@ -119,15 +119,13 @@ func runResource(t testutil.TestingT, ctx context.Context, path, key string) (st
return stdout.String(), err
}
func runResourceWithStderr(t testutil.TestingT, ctx context.Context, path, key string) (string, string) {
func runResourceWithStderr(t testutil.TestingT, ctx context.Context, path, key string) (string, string, error) {
ctx = env.Set(ctx, "BUNDLE_ROOT", path)
ctx = cmdio.NewContext(ctx, cmdio.Default())
c := testcli.NewRunner(t, ctx, "bundle", "run", key)
stdout, stderr, err := c.Run()
require.NoError(t, err)
return stdout.String(), stderr.String()
return stdout.String(), stderr.String(), err
}
func runResourceWithParams(t testutil.TestingT, ctx context.Context, path, key string, params ...string) (string, error) {

View File

@ -11,7 +11,7 @@ import (
"testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/iamutil"

View File

@ -10,7 +10,7 @@ import (
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/metadata"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/google/uuid"

View File

@ -4,7 +4,7 @@ import (
"context"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/listing"
"github.com/databricks/databricks-sdk-go/service/jobs"

View File

@ -3,7 +3,7 @@ package bundle_test
import (
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
"github.com/google/uuid"
@ -29,11 +29,6 @@ func runPythonWheelTest(t *testing.T, templateName, sparkVersion string, pythonW
destroyBundle(t, ctx, bundleRoot)
})
if testing.Short() {
t.Log("Skip the job run in short mode")
return
}
out, err := runResource(t, ctx, bundleRoot, "some_other_job")
require.NoError(t, err)
require.Contains(t, out, "Hello from my func")
@ -56,7 +51,9 @@ func TestPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
}
func TestPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
if testutil.GetCloud(t) == testutil.AWS {
_, wt := acc.WorkspaceTest(t)
if testutil.IsAWSCloud(wt) {
t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
}

View File

@ -4,7 +4,7 @@ import (
"context"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
"github.com/google/uuid"
@ -30,11 +30,6 @@ func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, arti
destroyBundle(t, ctx, bundleRoot)
})
if testing.Short() {
t.Log("Skip the job run in short mode")
return
}
out, err := runResource(t, ctx, bundleRoot, "jar_job")
require.NoError(t, err)
require.Contains(t, out, "Hello from Jar!")

View File

@ -6,7 +6,7 @@ import (
"regexp"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/databricks-sdk-go/listing"
"github.com/databricks/databricks-sdk-go/service/compute"

View File

@ -7,7 +7,7 @@ import (
"strings"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/filer"
"github.com/stretchr/testify/assert"

View File

@ -5,7 +5,7 @@ import (
"path"
"path/filepath"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"

View File

@ -6,7 +6,7 @@ import (
"strconv"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go"

View File

@ -6,7 +6,7 @@ import (
"fmt"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/workspace"

View File

@ -3,7 +3,7 @@ package storage_credentials_test
import (
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/stretchr/testify/assert"

View File

@ -15,7 +15,7 @@ import (
"testing"
"time"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"

View File

@ -11,7 +11,7 @@ import (
"strings"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go/service/workspace"

View File

@ -7,7 +7,7 @@ import (
"path"
"path/filepath"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"

View File

@ -8,7 +8,7 @@ import (
"strings"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/dbr"
"github.com/databricks/cli/libs/git"
"github.com/stretchr/testify/assert"

View File

@ -11,7 +11,7 @@ import (
"testing"
"time"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
lockpkg "github.com/databricks/cli/libs/locker"

View File

@ -4,7 +4,7 @@ import (
"strings"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"

View File

@ -14,7 +14,7 @@ import (
"time"
"github.com/databricks/cli/bundle/run/output"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/databricks-sdk-go"

View File

@ -31,7 +31,7 @@ function cli_snapshot_directory() {
dir="${dir}_386"
;;
arm64|aarch64)
dir="${dir}_arm64_v8.0"
dir="${dir}_arm64"
;;
armv7l|armv8l)
dir="${dir}_arm_6"

View File

@ -58,3 +58,7 @@ func GetCloud(t TestingT) Cloud {
}
return -1
}
func IsAWSCloud(t TestingT) bool {
return GetCloud(t) == AWS
}

View File

@ -23,7 +23,7 @@ func RandomName(prefix ...string) string {
randLen := 12
b := make([]byte, randLen)
for i := range b {
b[i] = charset[rand.Intn(len(charset))]
b[i] = charset[rand.Intn(randLen)]
}
if len(prefix) > 0 {
return fmt.Sprintf("%s%s", strings.Join(prefix, ""), b)

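On the hunk above: rand.Intn(len(charset)) samples the full alphabet, whereas rand.Intn(randLen) can only ever index the first randLen (12) characters of charset, silently shrinking the pool of possible names. A minimal sketch of the full-alphabet form (the charset contents are assumed):

package main

import (
	"fmt"
	"math/rand"
)

// Assumed alphabet; the real charset is defined elsewhere in testutil.
const charset = "abcdefghijklmnopqrstuvwxyz0123456789"

func randomSuffix(n int) string {
	b := make([]byte, n)
	for i := range b {
		// Index over the whole charset, not over n.
		b[i] = charset[rand.Intn(len(charset))]
	}
	return string(b)
}

func main() {
	fmt.Println(randomSuffix(12))
}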
View File

@ -34,10 +34,4 @@ type Extension struct {
// Version of the schema. This is used to determine if the schema is
// compatible with the current CLI version.
Version *int `json:"version,omitempty"`
// This field is not in JSON schema spec, but it is supported in VSCode and in the Databricks Workspace
// It is used to provide a rich description of the field in the hover tooltip.
// https://code.visualstudio.com/docs/languages/json#_use-rich-formatting-in-hovers
// Also it can be used in documentation generation.
MarkdownDescription string `json:"markdownDescription,omitempty"`
}
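
As an illustration of the markdownDescription extension shown above, a minimal sketch of how the field serializes alongside a schema, using a local mirror type rather than the package's own (field names follow the struct above):

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of the extension fields shown above.
type extension struct {
	Version             *int   `json:"version,omitempty"`
	MarkdownDescription string `json:"markdownDescription,omitempty"`
}

func main() {
	b, err := json.MarshalIndent(extension{
		MarkdownDescription: "The **name** of the app. See [docs](https://docs.databricks.com).",
	}, "", "  ")
	if err != nil {
		panic(err)
	}
	// Emits only the markdownDescription key; Version is omitted when nil.
	fmt.Println(string(b))
}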

View File

@ -69,13 +69,6 @@ type Schema struct {
// Schema that must match any of the schemas in the array
AnyOf []Schema `json:"anyOf,omitempty"`
// Schema that must match one of the schemas in the array
OneOf []Schema `json:"oneOf,omitempty"`
// Title of the object, rendered as inline documentation in the IDE.
// https://json-schema.org/understanding-json-schema/reference/annotations
Title string `json:"title,omitempty"`
}
// Default value defined in a JSON Schema, represented as a string.
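
Behind the OneOf/AnyOf swaps in the test hunks earlier: anyOf accepts a value that matches at least one subschema, while oneOf requires exactly one match. A hedged sketch with a reduced mirror of the Schema fields above (illustrative values):

package main

import (
	"encoding/json"
	"fmt"
)

// Reduced mirror of the Schema fields shown above.
type schema struct {
	Title string   `json:"title,omitempty"`
	AnyOf []schema `json:"anyOf,omitempty"`
	OneOf []schema `json:"oneOf,omitempty"`
	Type  string   `json:"type,omitempty"`
}

func main() {
	s := schema{
		Title: "resources.Job",
		// anyOf: valid if the value matches at least one branch;
		// oneOf would demand exactly one branch match.
		AnyOf: []schema{{Type: "object"}, {Type: "string"}},
	}
	b, err := json.MarshalIndent(s, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}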