Compare commits

...

33 Commits

Author SHA1 Message Date
Andrew Nester 2eda8e93f6
fix lint 2024-12-18 13:10:59 +01:00
Andrew Nester 72f70bf872
removed the test 2024-12-18 13:07:15 +01:00
Andrew Nester fa9278ebbe
fixed generating to different source folder 2024-12-18 12:35:19 +01:00
Andrew Nester e31306a31a
added missing err check 2024-12-18 12:35:18 +01:00
Andrew Nester 64bcb0a5f6
Added support for bundle generate and bind for Apps 2024-12-18 12:35:18 +01:00
Andrew Nester 06b8ac44bb
Added support for Databricks Apps in DABs 2024-12-18 12:35:18 +01:00
Andrew Nester e9e0566ada
added annotations 2024-12-18 12:34:12 +01:00
Andrew Nester f6de4c1e9e
feedback 2024-12-18 12:28:18 +01:00
Andrew Nester ab85a7f992
removed app id 2024-12-18 12:28:18 +01:00
Andrew Nester e8cae54be9
removed test prefix 2024-12-18 12:28:18 +01:00
Andrew Nester 1ceec5ea3b
fixed acc test 2024-12-18 12:28:18 +01:00
Andrew Nester 6655eb6a00
fixed tests after rebase 2024-12-18 12:28:18 +01:00
Andrew Nester e7a377cd1c
fixed lint 2024-12-18 12:28:18 +01:00
Andrew Nester 53393d57b1
test fixes 2024-12-18 12:28:17 +01:00
Andrew Nester 1ff7bd2b2e
fixes 2024-12-18 12:28:17 +01:00
Andrew Nester ddeeefef21
fixes after rebase 2024-12-18 12:28:17 +01:00
Andrew Nester a71289a969
use TF provider 1.61.0 2024-12-18 12:28:17 +01:00
Andrew Nester 2c359b4372
interpolate after 2024-12-18 12:28:17 +01:00
Andrew Nester 1a58f1b1e4
addressed feedback 2024-12-18 12:28:16 +01:00
Andrew Nester 28d69a5144
fix fmt 2024-12-18 12:28:16 +01:00
Andrew Nester ddc012c324
addressed feedback 2024-12-18 12:28:16 +01:00
Andrew Nester 483a239cce
changed to wait for deployment on start 2024-12-18 12:28:16 +01:00
Andrew Nester f51eefe6bd
fix test 2024-12-18 12:28:16 +01:00
Andrew Nester 395645f3f4
Added integration test 2024-12-18 12:28:15 +01:00
Andrew Nester e9d9caf07a
Added support for Databricks Apps in DABs 2024-12-18 12:28:12 +01:00
Ilya Kuznetsov 042c8d88c6
Custom annotations for bundle-specific JSON schema fields (#1957)
## Changes

Adds annotations to the JSON schema for fields that are not covered by the
OpenAPI spec.

Custom descriptions were copy-pasted from the documentation PR, which is
still WIP, so descriptions for some fields are missing

Further improvements:
* documentation autogen based on json-schema
* fix missing descriptions

## Tests

This script is not part of the CLI package, so I didn't test all corner
cases. A few high-level tests were added to make sure that the schema
annotations are in sync with the actual config

---------

Co-authored-by: Pieter Noordhuis <pieter.noordhuis@databricks.com>
2024-12-18 10:19:14 +00:00
Andrew Nester 5b84856b17
Correctly handle required query params in CLI generation (#2027)
## Changes
If there are required query params, they are top-level fields of the request
object, not fields of the nested request body.

This is needed for upcoming OpenAPI spec changes that introduce such query
parameters.

No changes after regenerating the CLI with the current spec and the fix (it
appears we haven't had such params before)
2024-12-17 20:05:42 +01:00
Pieter Noordhuis 13fa43e0f5
Remove superfluous helper (#2028)
## Changes

There was a helper for AWS only, and none for the other clouds. I found this
while looking through double calls to `acc.WorkspaceTest()` (see
`TestPythonWheelTaskDeployAndRunOnInteractiveCluster`).

## Tests

n/a
2024-12-17 17:34:09 +00:00
Pieter Noordhuis 23ddee8023
Skip job runs during integration testing for PRs (#2024)
## Changes

A small subset of tests trigger cluster creation to run jobs. These
tests comprise a substantial amount of the total integration test
runtime. We can skip them on PRs and only run them on the main branch.

## Tests

Confirmed the short runtime is ~20 mins.
2024-12-17 17:16:58 +00:00
Denis Bilenko 2fa3b48083
Remove 'make fmt' and 'fmt' workflow (#2026)
Remove the unnecessary make command and GitHub workflow; they are a subset of
"lint" now. However, keep "mod tidy" separate, since I don't think the linter
does that.
2024-12-17 16:34:54 +01:00
Pieter Noordhuis d7eac598cd
Move integration test helpers to `integration/internal` (#2022)
## Changes

The `acc` package is exclusively used by integration tests, so it
belongs under `integration/internal`.

It's not the best name; we can rename it later.

## Tests

n/a
2024-12-17 08:45:58 +01:00
Andrew Nester e60fe1bff2
Fixed downloading arm64 binaries (#2021)
## Changes
Fixed downloading arm64 binaries

Go 1.23 changed the way built binaries are prefixed on arm64; more details
here: https://tip.golang.org/doc/go1.23#arm64
2024-12-16 17:34:22 +01:00
Denis Bilenko b6f299974f
Fix testutil.RandomName to use the full character set (#2020)
## Changes
It was using only the first 12 characters of the set, which does not seem intended.

## Tests
Existing tests.
2024-12-16 17:21:20 +01:00
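For context, a minimal sketch of a name generator that draws from the full character set; the real helper lives in the CLI's testutil package, and the names below are assumptions, not the actual implementation:

package main

import (
	"fmt"
	"math/rand"
)

const charset = "abcdefghijklmnopqrstuvwxyz0123456789"

// randomName draws every character from the whole charset; the bug fixed
// above effectively limited the draw to a 12-character prefix of the set.
func randomName(prefix string, n int) string {
	b := make([]byte, n)
	for i := range b {
		b[i] = charset[rand.Intn(len(charset))]
	}
	return prefix + string(b)
}

func main() {
	fmt.Println(randomName("test-", 8))
}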
103 changed files with 6376 additions and 362 deletions

View File

@@ -11,7 +11,7 @@
   "required": ["go"],
   "post_generate": [
     "go test -timeout 240s -run TestConsistentDatabricksSdkVersion github.com/databricks/cli/internal/build",
-    "go run ./bundle/internal/schema/*.go ./bundle/schema/jsonschema.json",
+    "make schema",
     "echo 'bundle/internal/tf/schema/\\*.go linguist-generated=true' >> ./.gitattributes",
     "echo 'go.sum linguist-generated=true' >> ./.gitattributes",
     "echo 'bundle/schema/jsonschema.json linguist-generated=true' >> ./.gitattributes"

View File

@@ -411,5 +411,5 @@ func new{{.PascalName}}() *cobra.Command {
 {{- define "request-body-obj" -}}
   {{- $method := .Method -}}
   {{- $field := .Field -}}
-  {{$method.CamelName}}Req{{ if (and $method.RequestBodyField (not $field.IsPath)) }}.{{$method.RequestBodyField.PascalName}}{{end}}.{{$field.PascalName}}
+  {{$method.CamelName}}Req{{ if (and $method.RequestBodyField (and (not $field.IsPath) (not $field.IsQuery))) }}.{{$method.RequestBodyField.PascalName}}{{end}}.{{$field.PascalName}}
 {{- end -}}
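The template change above stops routing query-only fields through the request body. A minimal sketch of the generated shape this implies, with hypothetical type and field names (not taken from the actual generated CLI):

package main

import "fmt"

// createAppRequest mirrors a request with both a nested body and a required
// query parameter. Query parameters are top-level fields of the request
// object, so the template must reference them as createReq.NoCompute rather
// than through the request body field.
type createAppRequest struct {
	NoCompute bool          // required query parameter (hypothetical)
	App       createAppBody // nested request body
}

type createAppBody struct {
	Name string
}

func main() {
	createReq := createAppRequest{NoCompute: true, App: createAppBody{Name: "my_app"}}
	fmt.Println(createReq.NoCompute, createReq.App.Name)
}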

View File

@@ -57,38 +57,6 @@ jobs:
       - name: Publish test coverage
         uses: codecov/codecov-action@v4
-
-  fmt:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Setup Go
-        uses: actions/setup-go@v5
-        with:
-          go-version: 1.23.2
-          # No need to download cached dependencies when running gofmt.
-          cache: false
-
-      - name: Install goimports
-        run: |
-          go install golang.org/x/tools/cmd/goimports@latest
-
-      - name: Run make fmt
-        run: |
-          make fmt
-
-      - name: Run go mod tidy
-        run: |
-          go mod tidy
-
-      - name: Fail on differences
-        run: |
-          # Exit with status code 1 if there are differences (i.e. unformatted files)
-          git diff --exit-code
-
   golangci:
     name: lint
     runs-on: ubuntu-latest
@@ -97,6 +65,13 @@ jobs:
       - uses: actions/setup-go@v5
         with:
           go-version: 1.23.2
+      - name: Run go mod tidy
+        run: |
+          go mod tidy
+      - name: Fail on differences
+        run: |
+          # Exit with status code 1 if there are differences (i.e. unformatted files)
+          git diff --exit-code
       - name: golangci-lint
         uses: golangci/golangci-lint-action@v6
         with:
@@ -124,14 +99,19 @@ jobs:
       # By default the ajv-cli runs in strict mode which will fail if the schema
       # itself is not valid. Strict mode is more strict than the JSON schema
       # specification. See for details: https://ajv.js.org/options.html#strict-mode-options
+      # The ajv-cli is configured to use the markdownDescription keyword which is not part of the JSON schema specification,
+      # but is used in editors like VSCode to render markdown in the description field
       - name: Validate bundle schema
         run: |
           go run main.go bundle schema > schema.json
+          # Add markdownDescription keyword to ajv
+          echo "module.exports=function(a){a.addKeyword('markdownDescription')}" >> keywords.js
           for file in ./bundle/internal/schema/testdata/pass/*.yml; do
-            ajv test -s schema.json -d $file --valid
+            ajv test -s schema.json -d $file --valid -c=./keywords.js
           done
           for file in ./bundle/internal/schema/testdata/fail/*.yml; do
-            ajv test -s schema.json -d $file --invalid
+            ajv test -s schema.json -d $file --invalid -c=./keywords.js
           done

View File

@@ -1,11 +1,5 @@
 default: build

-fmt:
-	@echo "✓ Formatting source code with goimports ..."
-	@goimports -w $(shell find . -type f -name '*.go' -not -path "./vendor/*")
-	@echo "✓ Formatting source code with gofmt ..."
-	@gofmt -w $(shell find . -type f -name '*.go' -not -path "./vendor/*")
-
 lint: vendor
 	@echo "✓ Linting source code with https://golangci-lint.run/ (with --fix)..."
 	@golangci-lint run --fix ./...
@@ -36,7 +30,16 @@ vendor:
 	@echo "✓ Filling vendor folder with library code ..."
 	@go mod vendor

+schema:
+	@echo "✓ Generating json-schema ..."
+	@go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
+
+INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
+
 integration:
-	gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
+	$(INTEGRATION)
+
+integration-short:
+	$(INTEGRATION) -short

-.PHONY: fmt lint lintcheck test testonly coverage build snapshot vendor integration
+.PHONY: lint lintcheck test testonly coverage build snapshot vendor schema integration integration-short
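The new integration-short target forwards -short to go test through gotestsum. Inside a test, the conventional way to honor that flag is testing.Short(); a minimal sketch, with a hypothetical test name:

package integration_test

import "testing"

func TestJobRunOnCluster(t *testing.T) {
	// Cluster-backed job runs are skipped on PRs, which run `make integration-short`.
	if testing.Short() {
		t.Skip("skipping job run that triggers cluster creation in -short mode")
	}
	// ... create cluster, run job, assert on the result ...
}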

View File

@@ -0,0 +1,59 @@
package apps
import (
"context"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/dynvar"
)
type interpolateVariables struct{}
func (i *interpolateVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
pattern := dyn.NewPattern(
dyn.Key("resources"),
dyn.Key("apps"),
dyn.AnyKey(),
dyn.Key("config"),
)
tfToConfigMap := map[string]string{
"databricks_pipeline": "pipelines",
"databricks_job": "jobs",
"databricks_mlflow_model": "models",
"databricks_mlflow_experiment": "experiments",
"databricks_model_serving": "model_serving_endpoints",
"databricks_registered_model": "registered_models",
"databricks_quality_monitor": "quality_monitors",
"databricks_schema": "schemas",
"databricks_volume": "volumes",
"databricks_cluster": "clusters",
"databricks_dashboard": "dashboards",
"databricks_app": "apps",
}
err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
return dyn.MapByPattern(root, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) {
key, ok := tfToConfigMap[path[0].Key()]
if ok {
path = dyn.NewPath(dyn.Key("resources"), dyn.Key(key)).Append(path[1:]...)
}
return dyn.GetByPath(root, path)
})
})
})
return diag.FromErr(err)
}
func (i *interpolateVariables) Name() string {
return "apps.InterpolateVariables"
}
func InterpolateVariables() bundle.Mutator {
return &interpolateVariables{}
}

View File

@@ -0,0 +1,49 @@
package apps
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/require"
)
func TestAppInterpolateVariables(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Apps: map[string]*resources.App{
"my_app_1": {
App: &apps.App{
Name: "my_app_1",
},
Config: map[string]any{
"command": []string{"echo", "hello"},
"env": []map[string]string{
{"name": "JOB_ID", "value": "${resources.jobs.my_job.id}"},
},
},
},
"my_app_2": {
App: &apps.App{
Name: "my_app_2",
},
},
},
Jobs: map[string]*resources.Job{
"my_job": {
ID: "123",
},
},
},
},
}
diags := bundle.Apply(context.Background(), b, InterpolateVariables())
require.Empty(t, diags)
require.Equal(t, []any([]any{map[string]any{"name": "JOB_ID", "value": "123"}}), b.Config.Resources.Apps["my_app_1"].Config["env"])
require.Nil(t, b.Config.Resources.Apps["my_app_2"].Config)
}

View File

@@ -0,0 +1,111 @@
package apps
import (
"bytes"
"context"
"fmt"
"path"
"strings"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/deploy"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer"
"golang.org/x/sync/errgroup"
"gopkg.in/yaml.v3"
)
type uploadConfig struct {
filerFactory deploy.FilerFactory
}
func (u *uploadConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics
errGroup, ctx := errgroup.WithContext(ctx)
diagsPerApp := make(map[string]diag.Diagnostic)
for key, app := range b.Config.Resources.Apps {
// If the app has a config, we need to deploy it first.
// It means we need to write app.yml file with the content of the config field
// to the remote source code path of the app.
if app.Config != nil {
if !strings.HasPrefix(app.SourceCodePath, b.Config.Workspace.FilePath) {
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: "App source code invalid",
Detail: fmt.Sprintf("App source code path %s is not within file path %s", app.SourceCodePath, b.Config.Workspace.FilePath),
Locations: b.Config.GetLocations(fmt.Sprintf("resources.apps.%s.source_code_path", key)),
})
continue
}
appPath := strings.TrimPrefix(app.SourceCodePath, b.Config.Workspace.FilePath)
buf, err := configToYaml(app)
if err != nil {
return diag.FromErr(err)
}
// When the app is started, create a new app deployment and wait for it to complete.
f, err := u.filerFactory(b)
if err != nil {
return diag.FromErr(err)
}
errGroup.Go(func() error {
err = f.Write(ctx, path.Join(appPath, "app.yml"), buf, filer.OverwriteIfExists)
if err != nil {
diagsPerApp[key] = diag.Diagnostic{
Severity: diag.Error,
Summary: "Failed to save config",
Detail: fmt.Sprintf("Failed to write %s file: %s", path.Join(app.SourceCodePath, "app.yml"), err),
Locations: b.Config.GetLocations(fmt.Sprintf("resources.apps.%s", key)),
}
}
return nil
})
}
}
if err := errGroup.Wait(); err != nil {
return diags.Extend(diag.FromErr(err))
}
for _, diag := range diagsPerApp {
diags = append(diags, diag)
}
return diags
}
// Name implements bundle.Mutator.
func (u *uploadConfig) Name() string {
return "apps:UploadConfig"
}
func UploadConfig() bundle.Mutator {
return &uploadConfig{
filerFactory: func(b *bundle.Bundle) (filer.Filer, error) {
return filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.FilePath)
},
}
}
func configToYaml(app *resources.App) (*bytes.Buffer, error) {
buf := bytes.NewBuffer(nil)
enc := yaml.NewEncoder(buf)
enc.SetIndent(2)
err := enc.Encode(app.Config)
defer enc.Close()
if err != nil {
return nil, fmt.Errorf("failed to encode app config to yaml: %w", err)
}
return buf, nil
}
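A standalone sketch of what configToYaml above produces for a typical app config, using the same encoder settings (yaml.v3 with two-space indent); the config values are illustrative:

package main

import (
	"bytes"
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	config := map[string]any{
		"command": []string{"echo", "hello"},
		"env": []map[string]string{
			{"name": "JOB_ID", "value": "123"},
		},
	}
	buf := bytes.NewBuffer(nil)
	enc := yaml.NewEncoder(buf)
	enc.SetIndent(2) // matches the encoder settings used before writing app.yml
	if err := enc.Encode(config); err != nil {
		panic(err)
	}
	enc.Close()
	fmt.Print(buf.String()) // this YAML is what gets written to app.yml
}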

View File

@@ -0,0 +1,69 @@
package apps
import (
"bytes"
"context"
"os"
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
mockfiler "github.com/databricks/cli/internal/mocks/libs/filer"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
)
func TestAppUploadConfig(t *testing.T) {
root := t.TempDir()
err := os.MkdirAll(filepath.Join(root, "my_app"), 0o700)
require.NoError(t, err)
b := &bundle.Bundle{
BundleRootPath: root,
SyncRoot: vfs.MustNew(root),
Config: config.Root{
Workspace: config.Workspace{
RootPath: "/Workspace/Users/foo@bar.com/",
},
Resources: config.Resources{
Apps: map[string]*resources.App{
"my_app": {
App: &apps.App{
Name: "my_app",
},
SourceCodePath: "./my_app",
Config: map[string]any{
"command": []string{"echo", "hello"},
"env": []map[string]string{
{"name": "MY_APP", "value": "my value"},
},
},
},
},
},
},
}
mockFiler := mockfiler.NewMockFiler(t)
mockFiler.EXPECT().Write(mock.Anything, "my_app/app.yml", bytes.NewBufferString(`command:
- echo
- hello
env:
- name: MY_APP
value: my value
`), filer.OverwriteIfExists).Return(nil)
u := uploadConfig{
filerFactory: func(b *bundle.Bundle) (filer.Filer, error) {
return mockFiler, nil
},
}
diags := bundle.Apply(context.Background(), b, &u)
require.NoError(t, diags.Error())
}

bundle/apps/validate.go Normal file (+40 lines)
View File

@@ -0,0 +1,40 @@
package apps
import (
"context"
"fmt"
"path"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
)
type validate struct{}
func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics
possibleConfigFiles := []string{"app.yml", "app.yaml"}
for _, app := range b.Config.Resources.Apps {
for _, configFile := range possibleConfigFiles {
cf := path.Join(app.SourceCodePath, configFile)
if _, err := b.SyncRoot.Stat(cf); err == nil {
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: fmt.Sprintf("%s detected", configFile),
Detail: fmt.Sprintf("remove %s and use 'config' property for app resource '%s' instead", cf, app.Name),
})
}
}
}
return diags
}
func (v *validate) Name() string {
return "apps.Validate"
}
func Validate() bundle.Mutator {
return &validate{}
}

View File

@@ -0,0 +1,55 @@
package apps
import (
"context"
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/require"
)
func TestAppsValidate(t *testing.T) {
tmpDir := t.TempDir()
testutil.Touch(t, tmpDir, "app1", "app.yml")
testutil.Touch(t, tmpDir, "app2", "app.py")
b := &bundle.Bundle{
BundleRootPath: tmpDir,
SyncRootPath: tmpDir,
SyncRoot: vfs.MustNew(tmpDir),
Config: config.Root{
Resources: config.Resources{
Apps: map[string]*resources.App{
"app1": {
App: &apps.App{
Name: "app1",
},
SourceCodePath: "./app1",
},
"app2": {
App: &apps.App{
Name: "app2",
},
SourceCodePath: "./app2",
},
},
},
},
}
bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate()))
require.Len(t, diags, 1)
require.Equal(t, "app.yml detected", diags[0].Summary)
require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource")
}

View File

@@ -0,0 +1,37 @@
package generate
import (
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/apps"
)
func ConvertAppToValue(app *apps.App, sourceCodePath string, appConfig map[string]any) (dyn.Value, error) {
ac, err := convert.FromTyped(appConfig, dyn.NilValue)
if err != nil {
return dyn.NilValue, err
}
ar, err := convert.FromTyped(app.Resources, dyn.NilValue)
if err != nil {
return dyn.NilValue, err
}
// The majority of fields of the app struct are read-only.
// We copy the relevant fields manually.
dv := map[string]dyn.Value{
"name": dyn.NewValue(app.Name, []dyn.Location{{Line: 1}}),
"description": dyn.NewValue(app.Description, []dyn.Location{{Line: 2}}),
"source_code_path": dyn.NewValue(sourceCodePath, []dyn.Location{{Line: 3}}),
}
if ac.Kind() != dyn.KindNil {
dv["config"] = ac.WithLocations([]dyn.Location{{Line: 4}})
}
if ar.Kind() != dyn.KindNil {
dv["resources"] = ar.WithLocations([]dyn.Location{{Line: 5}})
}
return dyn.V(dv), nil
}
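A hedged usage sketch of ConvertAppToValue as bundle generate might call it; the import path for the generate package is an assumption, not taken from this diff:

package main

import (
	"fmt"

	"github.com/databricks/cli/cmd/bundle/generate" // assumed import path
	"github.com/databricks/databricks-sdk-go/service/apps"
)

func main() {
	app := &apps.App{Name: "my_app", Description: "demo app"}
	// Produces a dyn.Value with name, description, source_code_path, and config keys.
	v, err := generate.ConvertAppToValue(app, "src/app", map[string]any{
		"command": []string{"python", "app.py"},
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(v)
}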

View File

@@ -222,6 +222,8 @@ func (m *applyPresets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 		dashboard.DisplayName = prefix + dashboard.DisplayName
 	}

+	// Apps: No presets
+
 	if config.IsExplicitlyEnabled((b.Config.Presets.SourceLinkedDeployment)) {
 		isDatabricksWorkspace := dbr.RunsOnRuntime(ctx) && strings.HasPrefix(b.SyncRootPath, "/Workspace/")
 		if !isDatabricksWorkspace {

View File

@@ -0,0 +1,45 @@
package mutator
import (
"context"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge"
)
type mergeApps struct{}
func MergeApps() bundle.Mutator {
return &mergeApps{}
}
func (m *mergeApps) Name() string {
return "MergeApps"
}
func (m *mergeApps) resourceName(v dyn.Value) string {
switch v.Kind() {
case dyn.KindInvalid, dyn.KindNil:
return ""
case dyn.KindString:
return v.MustString()
default:
panic("app name must be a string")
}
}
func (m *mergeApps) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
if v.Kind() == dyn.KindNil {
return v, nil
}
return dyn.Map(v, "resources.apps", dyn.Foreach(func(_ dyn.Path, app dyn.Value) (dyn.Value, error) {
return dyn.Map(app, "resources", merge.ElementsByKeyWithOverride("name", m.resourceName))
}))
})
return diag.FromErr(err)
}

View File

@@ -0,0 +1,73 @@
package mutator_test
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/assert"
)
func TestMergeApps(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Apps: map[string]*resources.App{
"foo": {
App: &apps.App{
Name: "foo",
Resources: []apps.AppResource{
{
Name: "job1",
Job: &apps.AppResourceJob{
Id: "1234",
Permission: "CAN_MANAGE_RUN",
},
},
{
Name: "sql1",
SqlWarehouse: &apps.AppResourceSqlWarehouse{
Id: "5678",
Permission: "CAN_USE",
},
},
{
Name: "job1",
Job: &apps.AppResourceJob{
Id: "1234",
Permission: "CAN_MANAGE",
},
},
{
Name: "sql1",
Job: &apps.AppResourceJob{
Id: "9876",
Permission: "CAN_MANAGE",
},
},
},
},
},
},
},
},
}
diags := bundle.Apply(context.Background(), b, mutator.MergeApps())
assert.NoError(t, diags.Error())
j := b.Config.Resources.Apps["foo"]
assert.Len(t, j.Resources, 2)
assert.Equal(t, "job1", j.Resources[0].Name)
assert.Equal(t, "sql1", j.Resources[1].Name)
assert.Equal(t, "CAN_MANAGE", string(j.Resources[0].Job.Permission))
assert.Nil(t, j.Resources[1].SqlWarehouse)
assert.Equal(t, "CAN_MANAGE", string(j.Resources[1].Job.Permission))
}

View File

@@ -15,6 +15,7 @@ import (
 	"github.com/databricks/cli/libs/tags"
 	"github.com/databricks/cli/libs/vfs"
 	sdkconfig "github.com/databricks/databricks-sdk-go/config"
+	"github.com/databricks/databricks-sdk-go/service/apps"
 	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/dashboards"
@@ -144,6 +145,13 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
 					},
 				},
 			},
+			Apps: map[string]*resources.App{
+				"app1": {
+					App: &apps.App{
+						Name: "app1",
+					},
+				},
+			},
 		},
 	},
 	SyncRoot: vfs.MustNew("/Users/lennart.kats@databricks.com"),
@@ -418,6 +426,13 @@ func TestAllNonUcResourcesAreRenamed(t *testing.T) {
 		for _, key := range field.MapKeys() {
 			resource := field.MapIndex(key)
 			nameField := resource.Elem().FieldByName("Name")
+
+			resourceType := resources.Type().Field(i).Name
+			// Skip apps, as they are not renamed
+			if resourceType == "Apps" {
+				continue
+			}
+
 			if !nameField.IsValid() || nameField.Kind() != reflect.String {
 				continue
 			}

View File

@@ -119,6 +119,16 @@ func validateRunAs(b *bundle.Bundle) diag.Diagnostics {
 		))
 	}

+	// Apps do not support run_as in the API.
+	if len(b.Config.Resources.Apps) > 0 {
+		diags = diags.Extend(reportRunAsNotSupported(
+			"apps",
+			b.Config.GetLocation("resources.apps"),
+			b.Config.Workspace.CurrentUser.UserName,
+			identity,
+		))
+	}
+
 	return diags
 }

View File

@@ -32,6 +32,7 @@ func allResourceTypes(t *testing.T) []string {
 	// the dyn library gives us the correct list of all resources supported. Please
 	// also update this check when adding a new resource
 	require.Equal(t, []string{
+		"apps",
 		"clusters",
 		"dashboards",
 		"experiments",
@@ -143,6 +144,7 @@ func TestRunAsErrorForUnsupportedResources(t *testing.T) {
 		"experiments",
 		"schemas",
 		"volumes",
+		"apps",
 	}

 	base := config.Root{

View File

@@ -262,6 +262,7 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
 		t.applyPipelineTranslations,
 		t.applyArtifactTranslations,
 		t.applyDashboardTranslations,
+		t.applyAppsTranslations,
 	} {
 		v, err = fn(v)
 		if err != nil {

View File

@@ -0,0 +1,28 @@
package mutator
import (
"fmt"
"github.com/databricks/cli/libs/dyn"
)
func (t *translateContext) applyAppsTranslations(v dyn.Value) (dyn.Value, error) {
// Convert the `source_code_path` field to a remote absolute path.
// We use this path for app deployment to point to the source code.
pattern := dyn.NewPattern(
dyn.Key("resources"),
dyn.Key("apps"),
dyn.AnyKey(),
dyn.Key("source_code_path"),
)
return dyn.MapByPattern(v, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
key := p[2].Key()
dir, err := v.Location().Directory()
if err != nil {
return dyn.InvalidValue, fmt.Errorf("unable to determine directory for app %s: %w", key, err)
}
return t.rewriteRelativeTo(p, v, t.translateDirectoryPath, dir, "")
})
}

View File

@@ -0,0 +1,57 @@
package mutator_test
import (
"context"
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestTranslatePathsApps_FilePathRelativeSubDirectory(t *testing.T) {
dir := t.TempDir()
touchEmptyFile(t, filepath.Join(dir, "src", "app", "app.py"))
b := &bundle.Bundle{
SyncRootPath: dir,
SyncRoot: vfs.MustNew(dir),
Config: config.Root{
Workspace: config.Workspace{
FilePath: "/bundle/files",
},
Resources: config.Resources{
Apps: map[string]*resources.App{
"app": {
App: &apps.App{
Name: "My App",
},
SourceCodePath: "../src/app",
},
},
},
},
}
bundletest.SetLocation(b, "resources.apps", []dyn.Location{{
File: filepath.Join(dir, "resources/app.yml"),
}})
diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
require.NoError(t, diags.Error())
// Assert that the file path for the app has been converted to its local absolute path.
assert.Equal(
t,
"/bundle/files/src/app",
b.Config.Resources.Apps["app"].SourceCodePath,
)
}

View File

@@ -23,6 +23,7 @@ type Resources struct {
 	Volumes    map[string]*resources.Volume    `json:"volumes,omitempty"`
 	Clusters   map[string]*resources.Cluster   `json:"clusters,omitempty"`
 	Dashboards map[string]*resources.Dashboard `json:"dashboards,omitempty"`
+	Apps       map[string]*resources.App       `json:"apps,omitempty"`
 }

 type ConfigResource interface {
@@ -87,6 +88,7 @@ func (r *Resources) AllResources() []ResourceGroup {
 		collectResourceMap(descriptions["clusters"], r.Clusters),
 		collectResourceMap(descriptions["dashboards"], r.Dashboards),
 		collectResourceMap(descriptions["volumes"], r.Volumes),
+		collectResourceMap(descriptions["apps"], r.Apps),
 	}
 }
@@ -97,12 +99,19 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) {
 			found = append(found, r.Jobs[k])
 		}
 	}
+
 	for k := range r.Pipelines {
 		if k == key {
 			found = append(found, r.Pipelines[k])
 		}
 	}
+
+	for k := range r.Apps {
+		if k == key {
+			found = append(found, r.Apps[k])
+		}
+	}
+
 	if len(found) == 0 {
 		return nil, fmt.Errorf("no such resource: %s", key)
 	}
@@ -197,5 +206,11 @@ func SupportedResources() map[string]ResourceDescription {
 			SingularTitle: "Volume",
 			PluralTitle:   "Volumes",
 		},
+		"apps": {
+			SingularName:  "app",
+			PluralName:    "apps",
+			SingularTitle: "App",
+			PluralTitle:   "Apps",
+		},
 	}
 }

View File

@@ -0,0 +1,71 @@
package resources
import (
"context"
"fmt"
"net/url"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
"github.com/databricks/databricks-sdk-go/service/apps"
)
type App struct {
// SourceCodePath is a required field used by DABs to point to Databricks app source code
// on local disk and to the corresponding workspace path during app deployment.
SourceCodePath string `json:"source_code_path"`
// Config is an optional field which allows configuring the app following the Databricks app configuration format, like in app.yml.
// When this field is set, DABs reads the configuration set in this field and writes
// it to app.yml in the root of the source code folder in the Databricks workspace.
// If there's an app.yml defined locally, DABs will raise an error.
Config map[string]any `json:"config,omitempty"`
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
*apps.App
}
func (a *App) UnmarshalJSON(b []byte) error {
return marshal.Unmarshal(b, a)
}
func (a App) MarshalJSON() ([]byte, error) {
return marshal.Marshal(a)
}
func (a *App) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) {
_, err := w.Apps.GetByName(ctx, name)
if err != nil {
log.Debugf(ctx, "app %s does not exist", name)
return false, err
}
return true, nil
}
func (a *App) TerraformResourceName() string {
return "databricks_app"
}
func (a *App) InitializeURL(baseURL url.URL) {
if a.Name == "" {
return
}
baseURL.Path = fmt.Sprintf("apps/%s", a.Name)
a.URL = baseURL.String()
}
func (a *App) GetName() string {
return a.Name
}
func (a *App) GetURL() string {
return a.URL
}
func (a *App) IsNil() bool {
return a.App == nil
}

View File

@@ -9,6 +9,7 @@ import (
 	"github.com/databricks/cli/bundle/deploy/terraform/tfdyn"
 	"github.com/databricks/cli/bundle/internal/tf/schema"
 	"github.com/databricks/cli/libs/dyn"
+	"github.com/databricks/databricks-sdk-go/service/apps"
 	tfjson "github.com/hashicorp/terraform-json"
 )
@@ -196,6 +197,16 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
 			}
 			cur.ID = instance.Attributes.ID
 			config.Resources.Dashboards[resource.Name] = cur
+		case "databricks_app":
+			if config.Resources.Apps == nil {
+				config.Resources.Apps = make(map[string]*resources.App)
+			}
+			cur := config.Resources.Apps[resource.Name]
+			if cur == nil {
+				cur = &resources.App{ModifiedStatus: resources.ModifiedStatusDeleted, App: &apps.App{}}
+			}
+			cur.Name = instance.Attributes.Name
+			config.Resources.Apps[resource.Name] = cur
 		case "databricks_permissions":
 		case "databricks_grants":
 			// Ignore; no need to pull these back into the configuration.
@@ -260,6 +271,11 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
 			src.ModifiedStatus = resources.ModifiedStatusCreated
 		}
 	}
+	for _, src := range config.Resources.Apps {
+		if src.ModifiedStatus == "" && src.Name == "" {
+			src.ModifiedStatus = resources.ModifiedStatusCreated
+		}
+	}
 	return nil
 }

View File

@@ -10,6 +10,7 @@ import (
 	"github.com/databricks/cli/bundle/internal/tf/schema"
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/dyn/convert"
+	"github.com/databricks/databricks-sdk-go/service/apps"
 	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/dashboards"
@@ -694,6 +695,14 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
 					{Attributes: stateInstanceAttributes{ID: "1"}},
 				},
 			},
+			{
+				Type: "databricks_app",
+				Mode: "managed",
+				Name: "test_app",
+				Instances: []stateResourceInstance{
+					{Attributes: stateInstanceAttributes{Name: "app1"}},
+				},
+			},
 		},
 	}
 	err := TerraformToBundle(&tfState, &config)
@@ -732,6 +741,9 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
 	assert.Equal(t, "1", config.Resources.Dashboards["test_dashboard"].ID)
 	assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
+	assert.Equal(t, "app1", config.Resources.Apps["test_app"].Name)
+	assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Apps["test_app"].ModifiedStatus)
+
 	AssertFullResourceCoverage(t, &config)
 }
@@ -815,6 +827,13 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
 				},
 			},
 		},
+		Apps: map[string]*resources.App{
+			"test_app": {
+				App: &apps.App{
+					Description: "test_app",
+				},
+			},
+		},
 	},
 }
 tfState := resourcesState{
@@ -856,6 +875,9 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
 	assert.Equal(t, "", config.Resources.Dashboards["test_dashboard"].ID)
 	assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
+	assert.Equal(t, "", config.Resources.Apps["test_app"].Name)
+	assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Apps["test_app"].ModifiedStatus)
+
 	AssertFullResourceCoverage(t, &config)
 }
@@ -994,6 +1016,18 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
 				},
 			},
 		},
+		Apps: map[string]*resources.App{
+			"test_app": {
+				App: &apps.App{
+					Description: "test_app",
+				},
+			},
+			"test_app_new": {
+				App: &apps.App{
+					Description: "test_app_new",
+				},
+			},
+		},
 	},
 }
 tfState := resourcesState{
@@ -1174,6 +1208,22 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
 					{Attributes: stateInstanceAttributes{ID: "2"}},
 				},
 			},
+			{
+				Type: "databricks_app",
+				Mode: "managed",
+				Name: "test_app",
+				Instances: []stateResourceInstance{
+					{Attributes: stateInstanceAttributes{Name: "app1"}},
+				},
+			},
+			{
+				Type: "databricks_app",
+				Mode: "managed",
+				Name: "test_app_old",
+				Instances: []stateResourceInstance{
+					{Attributes: stateInstanceAttributes{Name: "app2"}},
+				},
+			},
 		},
 	}
 	err := TerraformToBundle(&tfState, &config)
@@ -1256,6 +1306,13 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
 	assert.Equal(t, "", config.Resources.Dashboards["test_dashboard_new"].ID)
 	assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard_new"].ModifiedStatus)
+	assert.Equal(t, "app1", config.Resources.Apps["test_app"].Name)
+	assert.Equal(t, "", config.Resources.Apps["test_app"].ModifiedStatus)
+	assert.Equal(t, "app2", config.Resources.Apps["test_app_old"].Name)
+	assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Apps["test_app_old"].ModifiedStatus)
+	assert.Equal(t, "", config.Resources.Apps["test_app_new"].Name)
+	assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Apps["test_app_new"].ModifiedStatus)
+
 	AssertFullResourceCoverage(t, &config)
 }

View File

@@ -63,6 +63,8 @@ func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 			path = dyn.NewPath(dyn.Key("databricks_cluster")).Append(path[2:]...)
 		case dyn.Key("dashboards"):
 			path = dyn.NewPath(dyn.Key("databricks_dashboard")).Append(path[2:]...)
+		case dyn.Key("apps"):
+			path = dyn.NewPath(dyn.Key("databricks_app")).Append(path[2:]...)
 		default:
 			// Trigger "key not found" for unknown resource types.
 			return dyn.GetByPath(root, path)

View File

@@ -34,6 +34,7 @@ func TestInterpolate(t *testing.T) {
 				"other_volume":    "${resources.volumes.other_volume.id}",
 				"other_cluster":   "${resources.clusters.other_cluster.id}",
 				"other_dashboard": "${resources.dashboards.other_dashboard.id}",
+				"other_app":       "${resources.apps.other_app.id}",
 			},
 			Tasks: []jobs.Task{
 				{
@@ -73,6 +74,7 @@ func TestInterpolate(t *testing.T) {
 	assert.Equal(t, "${databricks_volume.other_volume.id}", j.Tags["other_volume"])
 	assert.Equal(t, "${databricks_cluster.other_cluster.id}", j.Tags["other_cluster"])
 	assert.Equal(t, "${databricks_dashboard.other_dashboard.id}", j.Tags["other_dashboard"])
+	assert.Equal(t, "${databricks_app.other_app.id}", j.Tags["other_app"])

 	m := b.Config.Resources.Models["my_model"]
 	assert.Equal(t, "my_model", m.Model.Name)

View File

@@ -0,0 +1,46 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/service/apps"
)
func convertAppResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
// Normalize the output value to the target schema.
vout, diags := convert.Normalize(apps.App{}, vin)
for _, diag := range diags {
log.Debugf(ctx, "app normalization diagnostic: %s", diag.Summary)
}
return vout, nil
}
type appConverter struct{}
func (appConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
vout, err := convertAppResource(ctx, vin)
if err != nil {
return err
}
// Add the converted resource to the output.
out.App[key] = vout.AsAny()
// Configure permissions for this resource.
if permissions := convertPermissionsResource(ctx, vin); permissions != nil {
permissions.AppName = fmt.Sprintf("${databricks_app.%s.name}", key)
out.Permissions["app_"+key] = permissions
}
return nil
}
func init() {
registerConverter("apps", appConverter{})
}

View File

@@ -0,0 +1,98 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestConvertApp(t *testing.T) {
src := resources.App{
SourceCodePath: "./app",
Config: map[string]any{
"command": []string{"python", "app.py"},
},
App: &apps.App{
Name: "app_id",
Description: "app description",
Resources: []apps.AppResource{
{
Name: "job1",
Job: &apps.AppResourceJob{
Id: "1234",
Permission: "CAN_MANAGE_RUN",
},
},
{
Name: "sql1",
SqlWarehouse: &apps.AppResourceSqlWarehouse{
Id: "5678",
Permission: "CAN_USE",
},
},
},
},
Permissions: []resources.Permission{
{
Level: "CAN_RUN",
UserName: "jack@gmail.com",
},
{
Level: "CAN_MANAGE",
ServicePrincipalName: "sp",
},
},
}
vin, err := convert.FromTyped(src, dyn.NilValue)
require.NoError(t, err)
ctx := context.Background()
out := schema.NewResources()
err = appConverter{}.Convert(ctx, "my_app", vin, out)
require.NoError(t, err)
app := out.App["my_app"]
assert.Equal(t, map[string]any{
"description": "app description",
"name": "app_id",
"resources": []any{
map[string]any{
"name": "job1",
"job": map[string]any{
"id": "1234",
"permission": "CAN_MANAGE_RUN",
},
},
map[string]any{
"name": "sql1",
"sql_warehouse": map[string]any{
"id": "5678",
"permission": "CAN_USE",
},
},
},
}, app)
// Assert equality on the permissions
assert.Equal(t, &schema.ResourcePermissions{
AppName: "${databricks_app.my_app.name}",
AccessControl: []schema.ResourcePermissionsAccessControl{
{
PermissionLevel: "CAN_RUN",
UserName: "jack@gmail.com",
},
{
PermissionLevel: "CAN_MANAGE",
ServicePrincipalName: "sp",
},
},
}, out.Permissions["app_my_app"])
}

View File

@@ -34,6 +34,7 @@ type stateResourceInstance struct {
 type stateInstanceAttributes struct {
 	ID   string `json:"id"`
+	Name string `json:"name,omitempty"`
 	ETag string `json:"etag,omitempty"`
 }

View File

@@ -97,7 +97,7 @@ func TestParseResourcesStateWithExistingStateFile(t *testing.T) {
 				Type: "databricks_pipeline",
 				Name: "test_pipeline",
 				Instances: []stateResourceInstance{
-					{Attributes: stateInstanceAttributes{ID: "123"}},
+					{Attributes: stateInstanceAttributes{ID: "123", Name: "test_pipeline"}},
 				},
 			},
 		},

View File

@@ -0,0 +1,209 @@
package main
import (
"bytes"
"fmt"
"os"
"reflect"
"regexp"
"strings"
yaml3 "gopkg.in/yaml.v3"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/dyn/yamlsaver"
"github.com/databricks/cli/libs/jsonschema"
)
type annotation struct {
Description string `json:"description,omitempty"`
MarkdownDescription string `json:"markdown_description,omitempty"`
Title string `json:"title,omitempty"`
Default any `json:"default,omitempty"`
Enum []any `json:"enum,omitempty"`
}
type annotationHandler struct {
// Annotations read from all annotation files including all overrides
parsedAnnotations annotationFile
// Missing annotations for fields that are found in config that need to be added to the annotation file
missingAnnotations annotationFile
}
/**
* Parsed file with annotations, expected format:
* github.com/databricks/cli/bundle/config.Bundle:
* cluster_id:
* description: "Description"
*/
type annotationFile map[string]map[string]annotation
const Placeholder = "PLACEHOLDER"
// Adds annotations to the JSON schema reading from the annotation files.
// More details https://json-schema.org/understanding-json-schema/reference/annotations
func newAnnotationHandler(sources []string) (*annotationHandler, error) {
prev := dyn.NilValue
for _, path := range sources {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
generated, err := yamlloader.LoadYAML(path, bytes.NewBuffer(b))
if err != nil {
return nil, err
}
prev, err = merge.Merge(prev, generated)
if err != nil {
return nil, err
}
}
var data annotationFile
err := convert.ToTyped(&data, prev)
if err != nil {
return nil, err
}
d := &annotationHandler{}
d.parsedAnnotations = data
d.missingAnnotations = annotationFile{}
return d, nil
}
func (d *annotationHandler) addAnnotations(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
refPath := getPath(typ)
shouldHandle := strings.HasPrefix(refPath, "github.com")
if !shouldHandle {
return s
}
annotations := d.parsedAnnotations[refPath]
if annotations == nil {
annotations = map[string]annotation{}
}
rootTypeAnnotation, ok := annotations[RootTypeKey]
if ok {
assignAnnotation(&s, rootTypeAnnotation)
}
for k, v := range s.Properties {
item := annotations[k]
if item.Description == "" {
item.Description = Placeholder
emptyAnnotations := d.missingAnnotations[refPath]
if emptyAnnotations == nil {
emptyAnnotations = map[string]annotation{}
d.missingAnnotations[refPath] = emptyAnnotations
}
emptyAnnotations[k] = item
}
assignAnnotation(v, item)
}
return s
}
// Writes missing annotations with placeholder values back to the annotation file
func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error {
existingFile, err := os.ReadFile(outputPath)
if err != nil {
return err
}
existing, err := yamlloader.LoadYAML("", bytes.NewBuffer(existingFile))
if err != nil {
return err
}
missingAnnotations, err := convert.FromTyped(&d.missingAnnotations, dyn.NilValue)
if err != nil {
return err
}
output, err := merge.Merge(existing, missingAnnotations)
if err != nil {
return err
}
err = saveYamlWithStyle(outputPath, output)
if err != nil {
return err
}
return nil
}
func getPath(typ reflect.Type) string {
return typ.PkgPath() + "." + typ.Name()
}
func assignAnnotation(s *jsonschema.Schema, a annotation) {
if a.Description != Placeholder {
s.Description = a.Description
}
if a.Default != nil {
s.Default = a.Default
}
s.MarkdownDescription = convertLinksToAbsoluteUrl(a.MarkdownDescription)
s.Title = a.Title
s.Enum = a.Enum
}
func saveYamlWithStyle(outputPath string, input dyn.Value) error {
style := map[string]yaml3.Style{}
file, _ := input.AsMap()
for _, v := range file.Keys() {
style[v.MustString()] = yaml3.LiteralStyle
}
saver := yamlsaver.NewSaverWithStyle(style)
err := saver.SaveAsYAML(file, outputPath, true)
if err != nil {
return err
}
return nil
}
func convertLinksToAbsoluteUrl(s string) string {
if s == "" {
return s
}
base := "https://docs.databricks.com"
referencePage := "/dev-tools/bundles/reference.html"
// Regular expression to match Markdown-style links like [_](link)
re := regexp.MustCompile(`\[_\]\(([^)]+)\)`)
result := re.ReplaceAllStringFunc(s, func(match string) string {
matches := re.FindStringSubmatch(match)
if len(matches) < 2 {
return match
}
link := matches[1]
var text, absoluteURL string
if strings.HasPrefix(link, "#") {
text = strings.TrimPrefix(link, "#")
absoluteURL = fmt.Sprintf("%s%s%s", base, referencePage, link)
// Handle relative paths like /dev-tools/bundles/resources.html#dashboard
} else if strings.HasPrefix(link, "/") {
absoluteURL = strings.ReplaceAll(fmt.Sprintf("%s%s", base, link), ".md", ".html")
if strings.Contains(link, "#") {
parts := strings.Split(link, "#")
text = parts[1]
} else {
text = "link"
}
} else {
return match
}
return fmt.Sprintf("[%s](%s)", text, absoluteURL)
})
return result
}
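For illustration, a quick check of how convertLinksToAbsoluteUrl above rewrites the two supported link forms (anchor-only and root-relative); shown as a throwaway main to paste alongside the unexported function:

func main() {
	in := "See [_](/dev-tools/bundles/settings.md#git) and [_](#lock)."
	fmt.Println(convertLinksToAbsoluteUrl(in))
	// Prints:
	// See [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git) and [lock](https://docs.databricks.com/dev-tools/bundles/reference.html#lock).
}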

View File

@@ -0,0 +1,662 @@
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.App:
"create_time":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"resources":
"description": |-
PLACEHOLDER
"url":
"description": |-
PLACEHOLDER
"active_deployment":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"default_source_code_path":
"description": |-
PLACEHOLDER
"service_principal_client_id":
"description": |-
PLACEHOLDER
"service_principal_name":
"description": |-
PLACEHOLDER
"config":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"service_principal_id":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"compute_status":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"app_status":
"description": |-
PLACEHOLDER
"pending_deployment":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
"updater":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResource:
"name":
"description": |-
PLACEHOLDER
"secret":
"description": |-
PLACEHOLDER
"serving_endpoint":
"description": |-
PLACEHOLDER
"sql_warehouse":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"job":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"state":
"description": |-
PLACEHOLDER
"message":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"permission":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config.Artifact:
"build":
"description": |-
An optional set of non-default build commands that you want to run locally before deployment.
For Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment.
To specify multiple build commands, separate each command with double-ampersand (&&) characters.
"executable":
"description": |-
The executable type.
"files":
"description": |-
The source files for the artifact.
"markdown_description": |-
The source files for the artifact, defined as an [_](#artifact_file).
"path":
"description": |-
The location where the built artifact will be saved.
"type":
"description": |-
The type of the artifact.
"markdown_description": |-
The type of the artifact. Valid values are `wheel` or `jar`
github.com/databricks/cli/bundle/config.ArtifactFile:
"source":
"description": |-
The path of the files used to build the artifact.
github.com/databricks/cli/bundle/config.Bundle:
"cluster_id":
"description": |-
The ID of a cluster to use to run the bundle.
"markdown_description": |-
The ID of a cluster to use to run the bundle. See [_](/dev-tools/bundles/settings.md#cluster_id).
"compute_id":
"description": |-
PLACEHOLDER
"databricks_cli_version":
"description": |-
The Databricks CLI version to use for the bundle.
"markdown_description": |-
The Databricks CLI version to use for the bundle. See [_](/dev-tools/bundles/settings.md#databricks_cli_version).
"deployment":
"description": |-
The definition of the bundle deployment
"markdown_description": |-
The definition of the bundle deployment. For supported attributes, see [_](#deployment) and [_](/dev-tools/bundles/deployment-modes.md).
"git":
"description": |-
The Git version control details that are associated with your bundle.
"markdown_description": |-
The Git version control details that are associated with your bundle. For supported attributes, see [_](#git) and [_](/dev-tools/bundles/settings.md#git).
"name":
"description": |-
The name of the bundle.
"uuid":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config.Deployment:
"fail_on_active_runs":
"description": |-
Whether to fail on active runs. If this is set to true a deployment that is running can be interrupted.
"lock":
"description": |-
The deployment lock attributes.
"markdown_description": |-
The deployment lock attributes. See [_](#lock).
github.com/databricks/cli/bundle/config.Experimental:
"pydabs":
"description": |-
The PyDABs configuration.
"python_wheel_wrapper":
"description": |-
Whether to use a Python wheel wrapper
"scripts":
"description": |-
The commands to run
"use_legacy_run_as":
"description": |-
Whether to use the legacy run_as behavior
github.com/databricks/cli/bundle/config.Git:
"branch":
"description": |-
The Git branch name.
"markdown_description": |-
The Git branch name. See [_](/dev-tools/bundles/settings.md#git).
"origin_url":
"description": |-
The origin URL of the repository.
"markdown_description": |-
The origin URL of the repository. See [_](/dev-tools/bundles/settings.md#git).
github.com/databricks/cli/bundle/config.Lock:
"enabled":
"description": |-
Whether this lock is enabled.
"force":
"description": |-
Whether to force this lock if it is enabled.
github.com/databricks/cli/bundle/config.Presets:
"jobs_max_concurrent_runs":
"description": |-
The maximum concurrent runs for a job.
"name_prefix":
"description": |-
The prefix for job runs of the bundle.
"pipelines_development":
"description": |-
Whether pipeline deployments should be locked in development mode.
"source_linked_deployment":
"description": |-
Whether to link the deployment to the bundle source.
"tags":
"description": |-
The tags for the bundle deployment.
"trigger_pause_status":
"description": |-
A pause status to apply to all job triggers and schedules. Valid values are PAUSED or UNPAUSED.
github.com/databricks/cli/bundle/config.PyDABs:
"enabled":
"description": |-
Whether or not PyDABs (Private Preview) is enabled
"import":
"description": |-
The PyDABs project to import to discover resources, resource generator and mutators
"venv_path":
"description": |-
The Python virtual environment path
github.com/databricks/cli/bundle/config.Resources:
"apps":
"description": |-
PLACEHOLDER
"clusters":
"description": |-
The cluster definitions for the bundle.
"markdown_description": |-
The cluster definitions for the bundle. See [_](/dev-tools/bundles/resources.md#cluster).
"dashboards":
"description": |-
The dashboard definitions for the bundle.
"markdown_description": |-
The dashboard definitions for the bundle. See [_](/dev-tools/bundles/resources.md#dashboard).
"experiments":
"description": |-
The experiment definitions for the bundle.
"markdown_description": |-
The experiment definitions for the bundle. See [_](/dev-tools/bundles/resources.md#experiment).
"jobs":
"description": |-
The job definitions for the bundle.
"markdown_description": |-
The job definitions for the bundle. See [_](/dev-tools/bundles/resources.md#job).
"model_serving_endpoints":
"description": |-
The model serving endpoint definitions for the bundle.
"markdown_description": |-
The model serving endpoint definitions for the bundle. See [_](/dev-tools/bundles/resources.md#model_serving_endpoint).
"models":
"description": |-
The model definitions for the bundle.
"markdown_description": |-
The model definitions for the bundle. See [_](/dev-tools/bundles/resources.md#model).
"pipelines":
"description": |-
The pipeline definitions for the bundle.
"markdown_description": |-
The pipeline definitions for the bundle. See [_](/dev-tools/bundles/resources.md#pipeline).
"quality_monitors":
"description": |-
The quality monitor definitions for the bundle.
"markdown_description": |-
The quality monitor definitions for the bundle. See [_](/dev-tools/bundles/resources.md#quality_monitor).
"registered_models":
"description": |-
The registered model definitions for the bundle.
"markdown_description": |-
The registered model definitions for the bundle. See [_](/dev-tools/bundles/resources.md#registered_model).
"schemas":
"description": |-
The schema definitions for the bundle.
"markdown_description": |-
The schema definitions for the bundle. See [_](/dev-tools/bundles/resources.md#schema).
"volumes":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config.Root:
"artifacts":
"description": |-
Defines the attributes to build an artifact.
"bundle":
"description": |-
The attributes of the bundle.
"markdown_description": |-
The attributes of the bundle. See [_](/dev-tools/bundles/settings.md#bundle).
"experimental":
"description": |-
Defines attributes for experimental features.
"include":
"description": |-
Specifies a list of path globs that contain configuration files to include within the bundle.
"markdown_description": |-
Specifies a list of path globs that contain configuration files to include within the bundle. See [_](/dev-tools/bundles/settings.md#include).
"permissions":
"description": |-
Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle.
"markdown_description": |-
Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle. See [_](/dev-tools/bundles/settings.md#permissions) and [_](/dev-tools/bundles/permissions.md).
"presets":
"description": |-
Defines bundle deployment presets.
"markdown_description": |-
Defines bundle deployment presets. See [_](/dev-tools/bundles/deployment-modes.md#presets).
"resources":
"description": |-
Specifies information about the Databricks resources used by the bundle.
"markdown_description": |-
Specifies information about the Databricks resources used by the bundle. See [_](/dev-tools/bundles/resources.md).
"run_as":
"description": |-
The identity to use to run the bundle.
"sync":
"description": |-
The files and file paths to include or exclude in the bundle.
"markdown_description": |-
The files and file paths to include or exclude in the bundle. See [_](/dev-tools/bundles/).
"targets":
"description": |-
Defines deployment targets for the bundle.
"variables":
"description": |-
A map that defines the custom variables for the bundle, where each key is the name of the variable and the value is a map that defines the variable.
"workspace":
"description": |-
Defines the Databricks workspace for the bundle.
github.com/databricks/cli/bundle/config.Sync:
"exclude":
"description": |-
A list of files or folders to exclude from the bundle.
"include":
"description": |-
A list of files or folders to include in the bundle.
"paths":
"description": |-
The local folder paths, which can be outside the bundle root, to synchronize to the workspace when the bundle is deployed.
github.com/databricks/cli/bundle/config.Target:
"artifacts":
"description": |-
The artifacts to include in the target deployment.
"markdown_description": |-
The artifacts to include in the target deployment. See [_](#artifact).
"bundle":
"description": |-
The name of the bundle when deploying to this target.
"cluster_id":
"description": |-
The ID of the cluster to use for this target.
"compute_id":
"description": |-
Deprecated. The ID of the compute to use for this target.
"default":
"description": |-
Whether this target is the default target.
"git":
"description": |-
The Git version control settings for the target.
"markdown_description": |-
The Git version control settings for the target. See [_](#git).
"mode":
"description": |-
The deployment mode for the target.
"markdown_description": |-
The deployment mode for the target. Valid values are `development` or `production`. See [_](/dev-tools/bundles/deployment-modes.md).
"permissions":
"description": |-
The permissions for deploying and running the bundle in the target.
"markdown_description": |-
The permissions for deploying and running the bundle in the target. See [_](#permission).
"presets":
"description": |-
The deployment presets for the target.
"markdown_description": |-
The deployment presets for the target. See [_](#preset).
"resources":
"description": |-
The resource definitions for the target.
"markdown_description": |-
The resource definitions for the target. See [_](#resources).
"run_as":
"description": |-
The identity to use to run the bundle.
"markdown_description": |-
The identity to use to run the bundle. See [_](#job_run_as) and [_](/dev-tools/bundles/run_as.md).
"sync":
"description": |-
The local paths to sync to the target workspace when a bundle is run or deployed.
"markdown_description": |-
The local paths to sync to the target workspace when a bundle is run or deployed. See [_](#sync).
"variables":
"description": |-
The custom variable definitions for the target.
"markdown_description": |-
The custom variable definitions for the target. See [_](/dev-tools/bundles/settings.md#variables) and [_](/dev-tools/bundles/variables.md).
"workspace":
"description": |-
The Databricks workspace for the target.
"markdown_description": |-
The Databricks workspace for the target. See [_](#workspace).
github.com/databricks/cli/bundle/config.Workspace:
"artifact_path":
"description": |-
The artifact path to use within the workspace for both deployments and workflow runs
"auth_type":
"description": |-
The authentication type.
"azure_client_id":
"description": |-
The Azure client ID
"azure_environment":
"description": |-
The Azure environment
"azure_login_app_id":
"description": |-
The Azure login app ID
"azure_tenant_id":
"description": |-
The Azure tenant ID
"azure_use_msi":
"description": |-
Whether to use MSI for Azure
"azure_workspace_resource_id":
"description": |-
The Azure workspace resource ID
"client_id":
"description": |-
The client ID for the workspace
"file_path":
"description": |-
The file path to use within the workspace for both deployments and workflow runs
"google_service_account":
"description": |-
The Google service account name
"host":
"description": |-
The Databricks workspace host URL
"profile":
"description": |-
The Databricks workspace profile name
"resource_path":
"description": |-
The workspace resource path
"root_path":
"description": |-
The Databricks workspace root path
"state_path":
"description": |-
The workspace state path
github.com/databricks/cli/bundle/config/resources.Grant:
"principal":
"description": |-
The name of the principal that will be granted privileges
"privileges":
"description": |-
The privileges to grant to the specified entity
github.com/databricks/cli/bundle/config/resources.Permission:
"group_name":
"description": |-
The name of the group that has the permission set in level.
"level":
"description": |-
The allowed permission for the user, group, or service principal defined for this permission.
"service_principal_name":
"description": |-
The name of the service principal that has the permission set in level.
"user_name":
"description": |-
The name of the user that has the permission set in level.
github.com/databricks/cli/bundle/config/variable.Lookup:
"alert":
"description": |-
PLACEHOLDER
"cluster":
"description": |-
PLACEHOLDER
"cluster_policy":
"description": |-
PLACEHOLDER
"dashboard":
"description": |-
PLACEHOLDER
"instance_pool":
"description": |-
PLACEHOLDER
"job":
"description": |-
PLACEHOLDER
"metastore":
"description": |-
PLACEHOLDER
"notification_destination":
"description": |-
PLACEHOLDER
"pipeline":
"description": |-
PLACEHOLDER
"query":
"description": |-
PLACEHOLDER
"service_principal":
"description": |-
PLACEHOLDER
"warehouse":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/variable.TargetVariable:
"default":
"description": |-
PLACEHOLDER
"description":
"description": |-
The description of the variable.
"lookup":
"description": |-
The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
"type":
"description": |-
The type of the variable.
"markdown_description":
"description": |-
The type of the variable.
github.com/databricks/cli/bundle/config/variable.Variable:
"default":
"description": |-
PLACEHOLDER
"description":
"description": |-
The description of the variable.
"lookup":
"description": |-
The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
"markdown_description": |-
The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.
"type":
"description": |-
The type of the variable.
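The lookup keys listed under variable.Lookup above map one-to-one onto struct fields. As a reading aid, here is a rough Go sketch of the shape those annotation keys imply; the field names, string types, and json tags are assumptions for illustration, not the package's actual declaration:

package variable

// Lookup names an existing workspace object whose ID should be resolved
// when the variable is used. Exactly one field is expected to be set.
type Lookup struct {
	Alert                   string `json:"alert,omitempty"`
	Cluster                 string `json:"cluster,omitempty"`
	ClusterPolicy           string `json:"cluster_policy,omitempty"`
	Dashboard               string `json:"dashboard,omitempty"`
	InstancePool            string `json:"instance_pool,omitempty"`
	Job                     string `json:"job,omitempty"`
	Metastore               string `json:"metastore,omitempty"`
	NotificationDestination string `json:"notification_destination,omitempty"`
	Pipeline                string `json:"pipeline,omitempty"`
	Query                   string `json:"query,omitempty"`
	ServicePrincipal        string `json:"service_principal,omitempty"`
	Warehouse               string `json:"warehouse,omitempty"`
}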
github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
"ai21labs_api_key":
"description": |-
PLACEHOLDER
"ai21labs_api_key_plaintext":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig:
"private_key":
"description": |-
PLACEHOLDER
"private_key_plaintext":
"description": |-
PLACEHOLDER
"project_id":
"description": |-
PLACEHOLDER
"region":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig:
"microsoft_entra_client_id":
"description": |-
PLACEHOLDER
"microsoft_entra_client_secret":
"description": |-
PLACEHOLDER
"microsoft_entra_client_secret_plaintext":
"description": |-
PLACEHOLDER
"microsoft_entra_tenant_id":
"description": |-
PLACEHOLDER
"openai_api_base":
"description": |-
PLACEHOLDER
"openai_api_key":
"description": |-
PLACEHOLDER
"openai_api_key_plaintext":
"description": |-
PLACEHOLDER
"openai_api_type":
"description": |-
PLACEHOLDER
"openai_api_version":
"description": |-
PLACEHOLDER
"openai_deployment_name":
"description": |-
PLACEHOLDER
"openai_organization":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig:
"palm_api_key":
"description": |-
PLACEHOLDER
"palm_api_key_plaintext":
"description": |-
PLACEHOLDER

File diff suppressed because it is too large.


@@ -0,0 +1,155 @@
github.com/databricks/cli/bundle/config/resources.Cluster:
"data_security_mode":
"description": |-
PLACEHOLDER
"docker_image":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"runtime_engine":
"description": |-
PLACEHOLDER
"workload_type":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Dashboard:
"embed_credentials":
"description": |-
PLACEHOLDER
"file_path":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Job:
"health":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"run_as":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowExperiment:
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowModel:
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Pipeline:
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.QualityMonitor:
"table_name":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.RegisteredModel:
"grants":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Schema:
"grants":
"description": |-
PLACEHOLDER
"properties":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Volume:
"grants":
"description": |-
PLACEHOLDER
"volume_type":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes:
"availability":
"description": |-
PLACEHOLDER
"ebs_volume_type":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes:
"availability":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec:
"data_security_mode":
"description": |-
PLACEHOLDER
"docker_image":
"description": |-
PLACEHOLDER
"runtime_engine":
"description": |-
PLACEHOLDER
"workload_type":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.DockerImage:
"basic_auth":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes:
"availability":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.GitSource:
"git_snapshot":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment:
"spec":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule:
"metric":
"description": |-
PLACEHOLDER
"op":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules:
"rules":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask:
"python_named_params":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Task:
"health":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings:
"table_update":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Webhook:
"id":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger:
"quartz_cron_schedule":
"description": |-
PLACEHOLDER
"timezone_id":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
"cron":
"description": |-
PLACEHOLDER
"manual":
"description": |-
PLACEHOLDER


@@ -0,0 +1,44 @@
package main
import (
"testing"
)
func TestConvertLinksToAbsoluteUrl(t *testing.T) {
tests := []struct {
input string
expected string
}{
{
input: "",
expected: "",
},
{
input: "Some text (not a link)",
expected: "Some text (not a link)",
},
{
input: "This is a link to [_](#section)",
expected: "This is a link to [section](https://docs.databricks.com/dev-tools/bundles/reference.html#section)",
},
{
input: "This is a link to [_](/dev-tools/bundles/resources.html#dashboard)",
expected: "This is a link to [dashboard](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboard)",
},
{
input: "This is a link to [_](/dev-tools/bundles/resources.html)",
expected: "This is a link to [link](https://docs.databricks.com/dev-tools/bundles/resources.html)",
},
{
input: "This is a link to [external](https://external.com)",
expected: "This is a link to [external](https://external.com)",
},
}
for _, test := range tests {
result := convertLinksToAbsoluteUrl(test.input)
if result != test.expected {
t.Errorf("For input '%s', expected '%s', but got '%s'", test.input, test.expected, result)
}
}
}
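The convertLinksToAbsoluteUrl helper under test is not shown in this compare view. A minimal sketch that satisfies the table-driven cases above could look like the following; the regex, the docsBaseUrl constant, the reference.html fallback page, and the generic "link" text are all assumptions inferred from the expected outputs, not the package's actual implementation:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Assumed base URL; the real constant may differ.
const docsBaseUrl = "https://docs.databricks.com"

// Matches only the placeholder form [_](target) used in the annotations.
var markdownLinkRegex = regexp.MustCompile(`\[_\]\(([^)]+)\)`)

func convertLinksToAbsoluteUrl(s string) string {
	return markdownLinkRegex.ReplaceAllStringFunc(s, func(m string) string {
		target := markdownLinkRegex.FindStringSubmatch(m)[1]
		switch {
		case strings.HasPrefix(target, "#"):
			// Bare anchors resolve against the bundle reference page.
			return fmt.Sprintf("[%s](%s/dev-tools/bundles/reference.html%s)", target[1:], docsBaseUrl, target)
		case strings.Contains(target, "#"):
			// Site-relative path with an anchor: use the anchor as link text.
			parts := strings.SplitN(target, "#", 2)
			return fmt.Sprintf("[%s](%s%s#%s)", parts[1], docsBaseUrl, parts[0], parts[1])
		default:
			// Site-relative path without an anchor: fall back to generic text.
			return fmt.Sprintf("[link](%s%s)", docsBaseUrl, target)
		}
	})
}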


@@ -5,6 +5,7 @@ import (
"fmt"
"log"
"os"
+"path/filepath"
"reflect"

"github.com/databricks/cli/bundle/config"
@@ -43,7 +44,8 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
case jsonschema.ArrayType, jsonschema.ObjectType:
// arrays and objects can have complex variable values specified.
return jsonschema.Schema{
-AnyOf: []jsonschema.Schema{
+// OneOf is used because we don't expect more than 1 match and schema-based auto-complete works better with OneOf
+OneOf: []jsonschema.Schema{
s,
{
Type: jsonschema.StringType,
@@ -55,7 +57,7 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
// primitives can have variable values, or references like ${bundle.xyz}
// or ${workspace.xyz}
return jsonschema.Schema{
-AnyOf: []jsonschema.Schema{
+OneOf: []jsonschema.Schema{
s,
{Type: jsonschema.StringType, Pattern: interpolationPattern("resources")},
{Type: jsonschema.StringType, Pattern: interpolationPattern("bundle")},
@@ -113,37 +115,60 @@ func makeVolumeTypeOptional(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
}

func main() {
-if len(os.Args) != 2 {
-fmt.Println("Usage: go run main.go <output-file>")
+if len(os.Args) != 3 {
+fmt.Println("Usage: go run main.go <work-dir> <output-file>")
os.Exit(1)
}

+// Directory with annotation files
+workdir := os.Args[1]
// Output file, where the generated JSON schema will be written to.
-outputFile := os.Args[1]
+outputFile := os.Args[2]
+
+generateSchema(workdir, outputFile)
+}
+
+func generateSchema(workdir, outputFile string) {
+annotationsPath := filepath.Join(workdir, "annotations.yml")
+annotationsOpenApiPath := filepath.Join(workdir, "annotations_openapi.yml")
+annotationsOpenApiOverridesPath := filepath.Join(workdir, "annotations_openapi_overrides.yml")

// Input file, the databricks openapi spec.
inputFile := os.Getenv("DATABRICKS_OPENAPI_SPEC")
-if inputFile == "" {
-log.Fatal("DATABRICKS_OPENAPI_SPEC environment variable not set")
+if inputFile != "" {
+p, err := newParser(inputFile)
+if err != nil {
+log.Fatal(err)
+}
+fmt.Printf("Writing OpenAPI annotations to %s\n", annotationsOpenApiPath)
+err = p.extractAnnotations(reflect.TypeOf(config.Root{}), annotationsOpenApiPath, annotationsOpenApiOverridesPath)
+if err != nil {
+log.Fatal(err)
+}
}

-p, err := newParser(inputFile)
+a, err := newAnnotationHandler([]string{annotationsOpenApiPath, annotationsOpenApiOverridesPath, annotationsPath})
if err != nil {
log.Fatal(err)
}

// Generate the JSON schema from the bundle Go struct.
s, err := jsonschema.FromType(reflect.TypeOf(config.Root{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
-p.addDescriptions,
-p.addEnums,
removeJobsFields,
makeVolumeTypeOptional,
+a.addAnnotations,
addInterpolationPatterns,
})
if err != nil {
log.Fatal(err)
}

+// Overwrite the input annotation file, adding missing annotations
+err = a.syncWithMissingAnnotations(annotationsPath)
+if err != nil {
+log.Fatal(err)
+}
+
b, err := json.MarshalIndent(s, "", " ")
if err != nil {
log.Fatal(err)
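For readers skimming the diff, here is a small self-contained sketch of the OneOf wrapping introduced above. The interpolationPattern helper itself is not shown in this compare view, so the regex below is an assumption; only the Schema fields (Type, Pattern, Description, OneOf) are taken from the diff:

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/jsonschema"
)

// interpolationPatternSketch approximates what interpolationPattern is assumed
// to return: a regex accepting ${prefix.some.path} references.
func interpolationPatternSketch(prefix string) string {
	return fmt.Sprintf(`\$\{(%s(\.[a-zA-Z_][a-zA-Z0-9_]*)+)\}`, prefix)
}

func main() {
	// A primitive field keeps its original schema as the first OneOf branch and
	// additionally accepts interpolation strings such as "${var.cluster_id}".
	s := jsonschema.Schema{Type: jsonschema.StringType, Description: "example primitive field"}
	wrapped := jsonschema.Schema{
		OneOf: []jsonschema.Schema{
			s,
			{Type: jsonschema.StringType, Pattern: interpolationPatternSketch("var")},
		},
	}
	fmt.Printf("%+v\n", wrapped)
}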


@@ -0,0 +1,125 @@
package main
import (
"bytes"
"fmt"
"io"
"os"
"path"
"reflect"
"strings"
"testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/jsonschema"
"github.com/ghodss/yaml"
"github.com/stretchr/testify/assert"
)
func copyFile(src, dst string) error {
in, err := os.Open(src)
if err != nil {
return err
}
defer in.Close()
out, err := os.Create(dst)
if err != nil {
return err
}
defer out.Close()
_, err = io.Copy(out, in)
if err != nil {
return err
}
return out.Close()
}
// Checks whether descriptions are added for new config fields in the annotations.yml file
// If this test fails, either manually add descriptions to `annotations.yml` or do the following:
// 1. run `make schema` from the repository root to add placeholder descriptions
// 2. replace all "PLACEHOLDER" values with the actual descriptions if possible
// 3. run `make schema` again to regenerate the schema with actual descriptions
func TestRequiredAnnotationsForNewFields(t *testing.T) {
workdir := t.TempDir()
annotationsPath := path.Join(workdir, "annotations.yml")
annotationsOpenApiPath := path.Join(workdir, "annotations_openapi.yml")
annotationsOpenApiOverridesPath := path.Join(workdir, "annotations_openapi_overrides.yml")
// Copy existing annotation files from the same folder as this test
err := copyFile("annotations.yml", annotationsPath)
assert.NoError(t, err)
err = copyFile("annotations_openapi.yml", annotationsOpenApiPath)
assert.NoError(t, err)
err = copyFile("annotations_openapi_overrides.yml", annotationsOpenApiOverridesPath)
assert.NoError(t, err)
generateSchema(workdir, path.Join(t.TempDir(), "schema.json"))
originalFile, err := os.ReadFile("annotations.yml")
assert.NoError(t, err)
currentFile, err := os.ReadFile(annotationsPath)
assert.NoError(t, err)
original, err := yamlloader.LoadYAML("", bytes.NewBuffer(originalFile))
assert.NoError(t, err)
current, err := yamlloader.LoadYAML("", bytes.NewBuffer(currentFile))
assert.NoError(t, err)
// Collect added paths.
var updatedFieldPaths []string
_, err = merge.Override(original, current, merge.OverrideVisitor{
VisitInsert: func(basePath dyn.Path, right dyn.Value) (dyn.Value, error) {
updatedFieldPaths = append(updatedFieldPaths, basePath.String())
return right, nil
},
})
assert.NoError(t, err)
assert.Empty(t, updatedFieldPaths, fmt.Sprintf("Missing JSON-schema descriptions for new config fields in bundle/internal/schema/annotations.yml:\n%s", strings.Join(updatedFieldPaths, "\n")))
}
// Checks whether types in annotation files are still present in Config type
func TestNoDetachedAnnotations(t *testing.T) {
files := []string{
"annotations.yml",
"annotations_openapi.yml",
"annotations_openapi_overrides.yml",
}
types := map[string]bool{}
for _, file := range files {
annotations, err := getAnnotations(file)
assert.NoError(t, err)
for k := range annotations {
types[k] = false
}
}
_, err := jsonschema.FromType(reflect.TypeOf(config.Root{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
delete(types, getPath(typ))
return s
},
})
assert.NoError(t, err)
for typ := range types {
t.Errorf("Type `%s` in annotations file is not found in `root.Config` type", typ)
}
assert.Empty(t, types, "Detached annotations found, regenerate schema and check for package path changes")
}
func getAnnotations(path string) (annotationFile, error) {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
var data annotationFile
err = yaml.Unmarshal(b, &data)
return data, err
}
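Both tests rely on annotation types declared elsewhere in this package. A minimal sketch consistent with the YAML files above and with the getAnnotations helper would be the following; the exact field set and tags are assumptions inferred from the YAML keys:

package main

// annotation mirrors one entry in the annotation YAML files; fields beyond
// description and enum (for example markdown_description) are assumed from
// the YAML shown earlier.
type annotation struct {
	Description         string `json:"description,omitempty"`
	MarkdownDescription string `json:"markdown_description,omitempty"`
	Enum                []any  `json:"enum,omitempty"`
}

// annotationFile maps a Go type path, such as
// "github.com/databricks/cli/bundle/config.Bundle", to per-field annotations.
type annotationFile map[string]map[string]annotation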


@@ -1,6 +1,7 @@
package main

import (
+"bytes"
"encoding/json"
"fmt"
"os"
@@ -8,6 +9,9 @@ import (
"reflect"
"strings"

+"github.com/ghodss/yaml"
+
+"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/jsonschema"
)
@@ -23,6 +27,8 @@ type openapiParser struct {
ref map[string]jsonschema.Schema
}

+const RootTypeKey = "_"
+
func newParser(path string) (*openapiParser, error) {
b, err := os.ReadFile(path)
if err != nil {
@@ -89,35 +95,95 @@ func (p *openapiParser) findRef(typ reflect.Type) (jsonschema.Schema, bool) {
}

// Use the OpenAPI spec to load descriptions for the given type.
-func (p *openapiParser) addDescriptions(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
-ref, ok := p.findRef(typ)
-if !ok {
-return s
-}
-
-s.Description = ref.Description
-for k, v := range s.Properties {
-if refProp, ok := ref.Properties[k]; ok {
-v.Description = refProp.Description
-}
-}
-
-return s
-}
-
-// Use the OpenAPI spec add enum values for the given type.
-func (p *openapiParser) addEnums(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
-ref, ok := p.findRef(typ)
-if !ok {
-return s
-}
-
-s.Enum = append(s.Enum, ref.Enum...)
-for k, v := range s.Properties {
-if refProp, ok := ref.Properties[k]; ok {
-v.Enum = append(v.Enum, refProp.Enum...)
-}
-}
-
-return s
-}
+func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overridesPath string) error {
+annotations := annotationFile{}
+overrides := annotationFile{}
+
+b, err := os.ReadFile(overridesPath)
+if err != nil {
+return err
+}
+err = yaml.Unmarshal(b, &overrides)
+if err != nil {
+return err
+}
+if overrides == nil {
+overrides = annotationFile{}
+}
+
+_, err = jsonschema.FromType(typ, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
+func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
+ref, ok := p.findRef(typ)
+if !ok {
+return s
+}
+
+basePath := getPath(typ)
+pkg := map[string]annotation{}
+annotations[basePath] = pkg
+
+if ref.Description != "" || ref.Enum != nil {
+pkg[RootTypeKey] = annotation{Description: ref.Description, Enum: ref.Enum}
+}
+
+for k := range s.Properties {
+if refProp, ok := ref.Properties[k]; ok {
+pkg[k] = annotation{Description: refProp.Description, Enum: refProp.Enum}
+if refProp.Description == "" {
+addEmptyOverride(k, basePath, overrides)
+}
+} else {
+addEmptyOverride(k, basePath, overrides)
+}
+}
+return s
+},
+})
+if err != nil {
+return err
+}
+
+b, err = yaml.Marshal(overrides)
+if err != nil {
+return err
+}
+o, err := yamlloader.LoadYAML("", bytes.NewBuffer(b))
+if err != nil {
+return err
+}
+err = saveYamlWithStyle(overridesPath, o)
+if err != nil {
+return err
+}
+
+b, err = yaml.Marshal(annotations)
+if err != nil {
+return err
+}
+b = bytes.Join([][]byte{[]byte("# This file is auto-generated. DO NOT EDIT."), b}, []byte("\n"))
+err = os.WriteFile(outputPath, b, 0o644)
+if err != nil {
+return err
+}
+
+return nil
+}
+
+func addEmptyOverride(key, pkg string, overridesFile annotationFile) {
+if overridesFile[pkg] == nil {
+overridesFile[pkg] = map[string]annotation{}
+}
+
+overrides := overridesFile[pkg]
+if overrides[key].Description == "" {
+overrides[key] = annotation{Description: Placeholder}
+}
+
+a, ok := overrides[key]
+if !ok {
+a = annotation{}
+}
+if a.Description == "" {
+a.Description = Placeholder
+}
+overrides[key] = a
+}
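To see why the overrides file shown earlier is full of PLACEHOLDER entries, here is a self-contained, simplified toy of the addEmptyOverride flow above; the annotation types and placeholder constant are local stand-ins for the package's real declarations, copied here only so the example runs on its own:

package main

import "fmt"

const placeholder = "PLACEHOLDER"

type annotation struct{ Description string }

type annotationFile map[string]map[string]annotation

// addEmptyOverride records a PLACEHOLDER description for a field that has no
// OpenAPI-provided description, mirroring the behavior reconstructed above.
func addEmptyOverride(key, pkg string, overridesFile annotationFile) {
	if overridesFile[pkg] == nil {
		overridesFile[pkg] = map[string]annotation{}
	}
	if overridesFile[pkg][key].Description == "" {
		overridesFile[pkg][key] = annotation{Description: placeholder}
	}
}

func main() {
	// A field with no OpenAPI description gets a PLACEHOLDER entry, which is
	// exactly what produces the PLACEHOLDER values in annotations_openapi_overrides.yml.
	overrides := annotationFile{}
	addEmptyOverride("source_code_path", "github.com/databricks/cli/bundle/config/resources.App", overrides)
	fmt.Printf("%+v\n", overrides)
}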


@@ -0,0 +1,5 @@
targets:
production:
variables:
myvar:
default: true


@@ -51,6 +51,10 @@ var (
CAN_MANAGE: "CAN_MANAGE",
CAN_VIEW: "CAN_READ",
},
+"apps": {
+CAN_MANAGE: "CAN_MANAGE",
+CAN_VIEW: "CAN_USE",
+},
}
)


@@ -58,6 +58,10 @@ func TestApplyBundlePermissions(t *testing.T) {
"dashboard_1": {},
"dashboard_2": {},
},
+Apps: map[string]*resources.App{
+"app_1": {},
+"app_2": {},
+},
},
},
}
@@ -114,6 +118,10 @@ func TestApplyBundlePermissions(t *testing.T) {
require.Len(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, 2)
require.Contains(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"})
require.Contains(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, resources.Permission{Level: "CAN_READ", GroupName: "TestGroup"})
+
+require.Len(t, b.Config.Resources.Apps["app_1"].Permissions, 2)
+require.Contains(t, b.Config.Resources.Apps["app_1"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"})
+require.Contains(t, b.Config.Resources.Apps["app_1"].Permissions, resources.Permission{Level: "CAN_USE", GroupName: "TestGroup"})
}

func TestWarningOnOverlapPermission(t *testing.T) {


@@ -5,6 +5,7 @@ import (
"fmt"

"github.com/databricks/cli/bundle"
+"github.com/databricks/cli/bundle/apps"
"github.com/databricks/cli/bundle/artifacts"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
@@ -135,6 +136,8 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
bundle.Seq(
terraform.StatePush(),
terraform.Load(),
+apps.InterpolateVariables(),
+apps.UploadConfig(),
metadata.Compute(),
metadata.Upload(),
bundle.LogString("Deployment complete!"),


@@ -2,6 +2,7 @@ package phases

import (
"github.com/databricks/cli/bundle"
+"github.com/databricks/cli/bundle/apps"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
pythonmutator "github.com/databricks/cli/bundle/config/mutator/python"
@@ -37,6 +38,8 @@ func Initialize() bundle.Mutator {
mutator.MergeJobParameters(),
mutator.MergeJobTasks(),
mutator.MergePipelineClusters(),
+mutator.MergeApps(),
+
mutator.InitializeWorkspaceClient(),
mutator.PopulateCurrentUser(),
mutator.LoadGitDetails(),
@@ -80,6 +83,8 @@ func Initialize() bundle.Mutator {
mutator.TranslatePaths(),
trampoline.WrapperWarning(),
+
+apps.Validate(),
permissions.ValidateSharedRootPermissions(),
permissions.ApplyBundlePermissions(),
permissions.FilterCurrentUser(),

bundle/run/app.go

@@ -0,0 +1,211 @@
package run
import (
"context"
"fmt"
"time"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/run/output"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/spf13/cobra"
)
func logProgress(ctx context.Context, msg string) {
if msg == "" {
return
}
cmdio.LogString(ctx, fmt.Sprintf("✓ %s", msg))
}
type appRunner struct {
key
bundle *bundle.Bundle
app *resources.App
}
func (a *appRunner) Name() string {
if a.app == nil {
return ""
}
return a.app.Name
}
func isAppStopped(app *apps.App) bool {
return app.ComputeStatus == nil ||
(app.ComputeStatus.State == apps.ComputeStateStopped || app.ComputeStatus.State == apps.ComputeStateError)
}
func (a *appRunner) Run(ctx context.Context, opts *Options) (output.RunOutput, error) {
app := a.app
b := a.bundle
if app == nil {
return nil, fmt.Errorf("app is not defined")
}
logProgress(ctx, fmt.Sprintf("Getting the status of the app %s", app.Name))
w := b.WorkspaceClient()
// Check the status of the app first.
createdApp, err := w.Apps.Get(ctx, apps.GetAppRequest{Name: app.Name})
if err != nil {
return nil, err
}
if createdApp.AppStatus != nil {
logProgress(ctx, fmt.Sprintf("App is in %s state", createdApp.AppStatus.State))
}
if createdApp.ComputeStatus != nil {
logProgress(ctx, fmt.Sprintf("App compute is in %s state", createdApp.ComputeStatus.State))
}
// There could be 2 reasons why the app is not running:
// 1. The app is new and was never deployed yet.
// 2. The app was stopped (compute not running).
// We need to start the app only if the compute is not running.
if isAppStopped(createdApp) {
err := a.start(ctx)
if err != nil {
return nil, err
}
}
// Deploy the app.
err = a.deploy(ctx)
if err != nil {
return nil, err
}
cmdio.LogString(ctx, fmt.Sprintf("You can access the app at %s", createdApp.Url))
return nil, nil
}
func (a *appRunner) start(ctx context.Context) error {
app := a.app
b := a.bundle
w := b.WorkspaceClient()
logProgress(ctx, fmt.Sprintf("Starting the app %s", app.Name))
wait, err := w.Apps.Start(ctx, apps.StartAppRequest{Name: app.Name})
if err != nil {
return err
}
startedApp, err := wait.OnProgress(func(p *apps.App) {
if p.AppStatus == nil {
return
}
logProgress(ctx, "App is starting...")
}).Get()
if err != nil {
return err
}
// After the app is started (meaning the compute is running), the API will return the app object with the
// active and pending deployments fields (if any). If there are active or pending deployments,
// we need to wait for them to complete before we can do the new deployment.
// Otherwise, the new deployment will fail.
// Thus, we first wait for the active deployment to complete.
if startedApp.ActiveDeployment != nil &&
startedApp.ActiveDeployment.Status.State == apps.AppDeploymentStateInProgress {
logProgress(ctx, "Waiting for the active deployment to complete...")
_, err = w.Apps.WaitGetDeploymentAppSucceeded(ctx, app.Name, startedApp.ActiveDeployment.DeploymentId, 20*time.Minute, nil)
if err != nil {
return err
}
logProgress(ctx, "Active deployment is completed!")
}
// Then, we wait for the pending deployment to complete.
if startedApp.PendingDeployment != nil &&
startedApp.PendingDeployment.Status.State == apps.AppDeploymentStateInProgress {
logProgress(ctx, "Waiting for the pending deployment to complete...")
_, err = w.Apps.WaitGetDeploymentAppSucceeded(ctx, app.Name, startedApp.PendingDeployment.DeploymentId, 20*time.Minute, nil)
if err != nil {
return err
}
logProgress(ctx, "Pending deployment is completed!")
}
logProgress(ctx, "App is started!")
return nil
}
func (a *appRunner) deploy(ctx context.Context) error {
app := a.app
b := a.bundle
w := b.WorkspaceClient()
wait, err := w.Apps.Deploy(ctx, apps.CreateAppDeploymentRequest{
AppName: app.Name,
AppDeployment: &apps.AppDeployment{
Mode: apps.AppDeploymentModeSnapshot,
SourceCodePath: app.SourceCodePath,
},
})
// If deploy returns an error, there's likely an active deployment already in progress and a new one cannot be started; return the error to the caller.
if err != nil {
return err
}
_, err = wait.OnProgress(func(ad *apps.AppDeployment) {
if ad.Status == nil {
return
}
logProgress(ctx, ad.Status.Message)
}).Get()
if err != nil {
return err
}
return nil
}
func (a *appRunner) Cancel(ctx context.Context) error {
// We should cancel the app by stopping it.
app := a.app
b := a.bundle
if app == nil {
return fmt.Errorf("app is not defined")
}
w := b.WorkspaceClient()
logProgress(ctx, fmt.Sprintf("Stopping app %s", app.Name))
wait, err := w.Apps.Stop(ctx, apps.StopAppRequest{Name: app.Name})
if err != nil {
return err
}
_, err = wait.OnProgress(func(p *apps.App) {
if p.AppStatus == nil {
return
}
logProgress(ctx, p.AppStatus.Message)
}).Get()
logProgress(ctx, "App is stopped!")
return err
}
func (a *appRunner) Restart(ctx context.Context, opts *Options) (output.RunOutput, error) {
// We should restart the app by just running it again meaning a new app deployment will be done.
return a.Run(ctx, opts)
}
func (a *appRunner) ParseArgs(args []string, opts *Options) error {
if len(args) == 0 {
return nil
}
return fmt.Errorf("received %d unexpected positional arguments", len(args))
}
func (a *appRunner) CompleteArgs(args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return nil, cobra.ShellCompDirectiveNoFileComp
}

bundle/run/app_test.go

@@ -0,0 +1,216 @@
package run
import (
"bytes"
"context"
"os"
"path/filepath"
"testing"
"time"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
)
type testAppRunner struct {
m *mocks.MockWorkspaceClient
b *bundle.Bundle
ctx context.Context
}
func (ta *testAppRunner) run(t *testing.T) {
r := appRunner{
key: "my_app",
bundle: ta.b,
app: ta.b.Config.Resources.Apps["my_app"],
}
_, err := r.Run(ta.ctx, &Options{})
require.NoError(t, err)
}
func setupBundle(t *testing.T) (context.Context, *bundle.Bundle, *mocks.MockWorkspaceClient) {
root := t.TempDir()
err := os.MkdirAll(filepath.Join(root, "my_app"), 0o700)
require.NoError(t, err)
b := &bundle.Bundle{
BundleRootPath: root,
SyncRoot: vfs.MustNew(root),
Config: config.Root{
Workspace: config.Workspace{
RootPath: "/Workspace/Users/foo@bar.com/",
},
Resources: config.Resources{
Apps: map[string]*resources.App{
"my_app": {
App: &apps.App{
Name: "my_app",
},
SourceCodePath: "./my_app",
Config: map[string]any{
"command": []string{"echo", "hello"},
"env": []map[string]string{
{"name": "MY_APP", "value": "my value"},
},
},
},
},
},
},
}
mwc := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(mwc.WorkspaceClient)
bundletest.SetLocation(b, "resources.apps.my_app", []dyn.Location{{File: "./databricks.yml"}})
ctx := context.Background()
ctx = cmdio.InContext(ctx, cmdio.NewIO(ctx, flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "..."))
ctx = cmdio.NewContext(ctx, cmdio.NewLogger(flags.ModeAppend))
diags := bundle.Apply(ctx, b, bundle.Seq(
mutator.DefineDefaultWorkspacePaths(),
mutator.TranslatePaths(),
))
require.Empty(t, diags)
return ctx, b, mwc
}
func setupTestApp(t *testing.T, initialAppState apps.ApplicationState, initialComputeState apps.ComputeState) *testAppRunner {
ctx, b, mwc := setupBundle(t)
appApi := mwc.GetMockAppsAPI()
appApi.EXPECT().Get(mock.Anything, apps.GetAppRequest{
Name: "my_app",
}).Return(&apps.App{
Name: "my_app",
AppStatus: &apps.ApplicationStatus{
State: initialAppState,
},
ComputeStatus: &apps.ComputeStatus{
State: initialComputeState,
},
}, nil)
wait := &apps.WaitGetDeploymentAppSucceeded[apps.AppDeployment]{
Poll: func(_ time.Duration, _ func(*apps.AppDeployment)) (*apps.AppDeployment, error) {
return nil, nil
},
}
appApi.EXPECT().Deploy(mock.Anything, apps.CreateAppDeploymentRequest{
AppName: "my_app",
AppDeployment: &apps.AppDeployment{
Mode: apps.AppDeploymentModeSnapshot,
SourceCodePath: "/Workspace/Users/foo@bar.com/files/my_app",
},
}).Return(wait, nil)
return &testAppRunner{
m: mwc,
b: b,
ctx: ctx,
}
}
func TestAppRunStartedApp(t *testing.T) {
r := setupTestApp(t, apps.ApplicationStateRunning, apps.ComputeStateActive)
r.run(t)
}
func TestAppRunStoppedApp(t *testing.T) {
r := setupTestApp(t, apps.ApplicationStateCrashed, apps.ComputeStateStopped)
appsApi := r.m.GetMockAppsAPI()
appsApi.EXPECT().Start(mock.Anything, apps.StartAppRequest{
Name: "my_app",
}).Return(&apps.WaitGetAppActive[apps.App]{
Poll: func(_ time.Duration, _ func(*apps.App)) (*apps.App, error) {
return &apps.App{
Name: "my_app",
AppStatus: &apps.ApplicationStatus{
State: apps.ApplicationStateRunning,
},
ComputeStatus: &apps.ComputeStatus{
State: apps.ComputeStateActive,
},
}, nil
},
}, nil)
r.run(t)
}
func TestAppRunWithAnActiveDeploymentInProgress(t *testing.T) {
r := setupTestApp(t, apps.ApplicationStateCrashed, apps.ComputeStateStopped)
appsApi := r.m.GetMockAppsAPI()
appsApi.EXPECT().Start(mock.Anything, apps.StartAppRequest{
Name: "my_app",
}).Return(&apps.WaitGetAppActive[apps.App]{
Poll: func(_ time.Duration, _ func(*apps.App)) (*apps.App, error) {
return &apps.App{
Name: "my_app",
AppStatus: &apps.ApplicationStatus{
State: apps.ApplicationStateRunning,
},
ComputeStatus: &apps.ComputeStatus{
State: apps.ComputeStateActive,
},
ActiveDeployment: &apps.AppDeployment{
DeploymentId: "active_deployment_id",
Status: &apps.AppDeploymentStatus{
State: apps.AppDeploymentStateInProgress,
},
},
PendingDeployment: &apps.AppDeployment{
DeploymentId: "pending_deployment_id",
Status: &apps.AppDeploymentStatus{
State: apps.AppDeploymentStateCancelled,
},
},
}, nil
},
}, nil)
appsApi.EXPECT().WaitGetDeploymentAppSucceeded(mock.Anything, "my_app", "active_deployment_id", mock.Anything, mock.Anything).Return(nil, nil)
r.run(t)
}
func TestStopApp(t *testing.T) {
ctx, b, mwc := setupBundle(t)
appsApi := mwc.GetMockAppsAPI()
appsApi.EXPECT().Stop(mock.Anything, apps.StopAppRequest{
Name: "my_app",
}).Return(&apps.WaitGetAppStopped[apps.App]{
Poll: func(_ time.Duration, _ func(*apps.App)) (*apps.App, error) {
return &apps.App{
Name: "my_app",
AppStatus: &apps.ApplicationStatus{
State: apps.ApplicationStateUnavailable,
},
}, nil
},
}, nil)
r := appRunner{
key: "my_app",
bundle: b,
app: b.Config.Resources.Apps["my_app"],
}
err := r.Cancel(ctx)
require.NoError(t, err)
}


@@ -42,7 +42,7 @@ type Runner interface {
// IsRunnable returns a filter that only allows runnable resources.
func IsRunnable(ref refs.Reference) bool {
switch ref.Resource.(type) {
-case *resources.Job, *resources.Pipeline:
+case *resources.Job, *resources.Pipeline, *resources.App:
return true
default:
return false
@@ -56,6 +56,12 @@ func ToRunner(b *bundle.Bundle, ref refs.Reference) (Runner, error) {
return &jobRunner{key: key(ref.KeyWithType), bundle: b, job: resource}, nil
case *resources.Pipeline:
return &pipelineRunner{key: key(ref.KeyWithType), bundle: b, pipeline: resource}, nil
+case *resources.App:
+return &appRunner{
+key: key(ref.KeyWithType),
+bundle: b,
+app: resource,
+}, nil
default:
return nil, fmt.Errorf("unsupported resource type: %T", resource)
}


@@ -41,21 +41,21 @@ func TestJsonSchema(t *testing.T) {
resourceJob := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Job")
fields := []string{"name", "continuous", "tasks", "trigger"}
for _, field := range fields {
-assert.NotEmpty(t, resourceJob.AnyOf[0].Properties[field].Description)
+assert.NotEmpty(t, resourceJob.OneOf[0].Properties[field].Description)
}

// Assert descriptions were also loaded for a job task definition.
jobTask := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.Task")
fields = []string{"notebook_task", "spark_jar_task", "spark_python_task", "spark_submit_task", "description", "depends_on", "environment_key", "for_each_task", "existing_cluster_id"}
for _, field := range fields {
-assert.NotEmpty(t, jobTask.AnyOf[0].Properties[field].Description)
+assert.NotEmpty(t, jobTask.OneOf[0].Properties[field].Description)
}

// Assert descriptions are loaded for pipelines
pipeline := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Pipeline")
fields = []string{"name", "catalog", "clusters", "channel", "continuous", "development"}
for _, field := range fields {
-assert.NotEmpty(t, pipeline.AnyOf[0].Properties[field].Description)
+assert.NotEmpty(t, pipeline.OneOf[0].Properties[field].Description)
}

providers := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.GitProvider")

File diff suppressed because it is too large.


@@ -0,0 +1,71 @@
bundle:
name: apps
workspace:
host: https://acme.cloud.databricks.com/
variables:
app_config:
type: complex
default:
command:
- "python"
- "app.py"
env:
- name: SOME_ENV_VARIABLE
value: "Some value"
resources:
apps:
my_app:
name: "my-app"
description: "My App"
source_code_path: ./app
config: ${var.app_config}
resources:
- name: "my-sql-warehouse"
sql_warehouse:
id: 1234
permission: "CAN_USE"
- name: "my-job"
job:
id: 5678
permission: "CAN_MANAGE_RUN"
permissions:
- user_name: "foo@bar.com"
level: "CAN_VIEW"
- service_principal_name: "my_sp"
level: "CAN_MANAGE"
targets:
default:
development:
variables:
app_config:
command:
- "python"
- "dev.py"
env:
- name: SOME_ENV_VARIABLE_2
value: "Some value 2"
resources:
apps:
my_app:
source_code_path: ./app-dev
resources:
- name: "my-sql-warehouse"
sql_warehouse:
id: 1234
permission: "CAN_MANAGE"
- name: "my-job"
job:
id: 5678
permission: "CAN_MANAGE"
- name: "my-secret"
secret:
key: "key"
scope: "scope"
permission: "CAN_USE"

bundle/tests/apps_test.go

@@ -0,0 +1,60 @@
package config_tests
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
)
func TestApps(t *testing.T) {
b := load(t, "./apps")
assert.Equal(t, "apps", b.Config.Bundle.Name)
diags := bundle.Apply(context.Background(), b,
bundle.Seq(
mutator.SetVariables(),
mutator.ResolveVariableReferences("variables"),
))
assert.Empty(t, diags)
app := b.Config.Resources.Apps["my_app"]
assert.Equal(t, "my-app", app.Name)
assert.Equal(t, "My App", app.Description)
assert.Equal(t, []any{"python", "app.py"}, app.Config["command"])
assert.Equal(t, []any{map[string]any{"name": "SOME_ENV_VARIABLE", "value": "Some value"}}, app.Config["env"])
assert.Len(t, app.Resources, 2)
assert.Equal(t, "1234", app.Resources[0].SqlWarehouse.Id)
assert.Equal(t, "CAN_USE", string(app.Resources[0].SqlWarehouse.Permission))
assert.Equal(t, "5678", app.Resources[1].Job.Id)
assert.Equal(t, "CAN_MANAGE_RUN", string(app.Resources[1].Job.Permission))
}
func TestAppsOverride(t *testing.T) {
b := loadTarget(t, "./apps", "development")
assert.Equal(t, "apps", b.Config.Bundle.Name)
diags := bundle.Apply(context.Background(), b,
bundle.Seq(
mutator.SetVariables(),
mutator.ResolveVariableReferences("variables"),
))
assert.Empty(t, diags)
app := b.Config.Resources.Apps["my_app"]
assert.Equal(t, "my-app", app.Name)
assert.Equal(t, "My App", app.Description)
assert.Equal(t, []any{"python", "dev.py"}, app.Config["command"])
assert.Equal(t, []any{map[string]any{"name": "SOME_ENV_VARIABLE_2", "value": "Some value 2"}}, app.Config["env"])
assert.Len(t, app.Resources, 3)
assert.Equal(t, "1234", app.Resources[0].SqlWarehouse.Id)
assert.Equal(t, "CAN_MANAGE", string(app.Resources[0].SqlWarehouse.Permission))
assert.Equal(t, "5678", app.Resources[1].Job.Id)
assert.Equal(t, "CAN_MANAGE", string(app.Resources[1].Job.Permission))
assert.Equal(t, "key", app.Resources[2].Secret.Key)
assert.Equal(t, "scope", app.Resources[2].Secret.Scope)
assert.Equal(t, "CAN_USE", string(app.Resources[2].Secret.Permission))
}


@@ -46,6 +46,7 @@ func loadTargetWithDiags(path, env string) (*bundle.Bundle, diag.Diagnostics) {
mutator.MergeJobParameters(),
mutator.MergeJobTasks(),
mutator.MergePipelineClusters(),
+mutator.MergeApps(),
))
return b, diags
}


@@ -17,6 +17,7 @@ func newGenerateCommand() *cobra.Command {
cmd.AddCommand(generate.NewGenerateJobCommand())
cmd.AddCommand(generate.NewGeneratePipelineCommand())
cmd.AddCommand(generate.NewGenerateDashboardCommand())
+cmd.AddCommand(generate.NewGenerateAppCommand())
cmd.PersistentFlags().StringVar(&key, "key", "", `resource key to use for the generated configuration`)
return cmd
}

cmd/bundle/generate/app.go

@@ -0,0 +1,166 @@
package generate
import (
"context"
"errors"
"fmt"
"io"
"io/fs"
"path/filepath"
"github.com/databricks/cli/bundle/config/generate"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/yamlsaver"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/textutil"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/spf13/cobra"
"gopkg.in/yaml.v3"
)
func NewGenerateAppCommand() *cobra.Command {
var configDir string
var sourceDir string
var appName string
var force bool
cmd := &cobra.Command{
Use: "app",
Short: "Generate bundle configuration for a Databricks app",
}
cmd.Flags().StringVar(&appName, "existing-app-name", "", `App name to generate config for`)
cmd.MarkFlagRequired("existing-app-name")
cmd.Flags().StringVarP(&configDir, "config-dir", "d", filepath.Join("resources"), `Directory path where the output bundle config will be stored`)
cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", filepath.Join("src", "app"), `Directory path where the app files will be stored`)
cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`)
cmd.RunE = func(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
b, diags := root.MustConfigureBundle(cmd)
if err := diags.Error(); err != nil {
return diags.Error()
}
w := b.WorkspaceClient()
cmdio.LogString(ctx, fmt.Sprintf("Loading app '%s' configuration", appName))
app, err := w.Apps.Get(ctx, apps.GetAppRequest{Name: appName})
if err != nil {
return err
}
// Making sure the config directory and source directory are absolute paths.
if !filepath.IsAbs(configDir) {
configDir = filepath.Join(b.BundleRootPath, configDir)
}
if !filepath.IsAbs(sourceDir) {
sourceDir = filepath.Join(b.BundleRootPath, sourceDir)
}
downloader := newDownloader(w, sourceDir, configDir)
sourceCodePath := app.DefaultSourceCodePath
err = downloader.markDirectoryForDownload(ctx, &sourceCodePath)
if err != nil {
return err
}
appConfig, err := getAppConfig(ctx, app, w)
if err != nil {
return fmt.Errorf("failed to get app config: %w", err)
}
// Making sure the source code path is relative to the config directory.
rel, err := filepath.Rel(configDir, sourceDir)
if err != nil {
return err
}
v, err := generate.ConvertAppToValue(app, filepath.ToSlash(rel), appConfig)
if err != nil {
return err
}
appKey := cmd.Flag("key").Value.String()
if appKey == "" {
appKey = textutil.NormalizeString(app.Name)
}
result := map[string]dyn.Value{
"resources": dyn.V(map[string]dyn.Value{
"apps": dyn.V(map[string]dyn.Value{
appKey: v,
}),
}),
}
// If there are app.yaml or app.yml files in the source code path, they will be downloaded but we don't want to include them in the bundle.
// We include this configuration inline, so we need to remove these files.
for _, configFile := range []string{"app.yml", "app.yaml"} {
delete(downloader.files, filepath.Join(sourceDir, configFile))
}
err = downloader.FlushToDisk(ctx, force)
if err != nil {
return err
}
filename := filepath.Join(configDir, fmt.Sprintf("%s.app.yml", appKey))
saver := yamlsaver.NewSaver()
err = saver.SaveAsYAML(result, filename, force)
if err != nil {
return err
}
cmdio.LogString(ctx, fmt.Sprintf("App configuration successfully saved to %s", filename))
return nil
}
return cmd
}
func getAppConfig(ctx context.Context, app *apps.App, w *databricks.WorkspaceClient) (map[string]any, error) {
sourceCodePath := app.DefaultSourceCodePath
f, err := filer.NewWorkspaceFilesClient(w, sourceCodePath)
if err != nil {
return nil, err
}
// The app config is stored in app.yml or app.yaml file in the source code path.
configFileNames := []string{"app.yml", "app.yaml"}
for _, configFile := range configFileNames {
r, err := f.Read(ctx, configFile)
if err != nil {
if errors.Is(err, fs.ErrNotExist) {
continue
}
return nil, err
}
defer r.Close()
cmdio.LogString(ctx, fmt.Sprintf("Reading app configuration from %s", configFile))
content, err := io.ReadAll(r)
if err != nil {
return nil, err
}
var appConfig map[string]any
err = yaml.Unmarshal(content, &appConfig)
if err != nil {
cmdio.LogString(ctx, fmt.Sprintf("Failed to parse app configuration:\n%s\nerr: %v", string(content), err))
return nil, nil
}
return appConfig, nil
}
return nil, nil
}


@@ -13,6 +13,7 @@ import (
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines"
+"github.com/databricks/databricks-sdk-go/service/workspace"
"golang.org/x/sync/errgroup"
)
@@ -63,6 +64,37 @@ func (n *downloader) markFileForDownload(ctx context.Context, filePath *string) error {
return nil
}

+func (n *downloader) markDirectoryForDownload(ctx context.Context, dirPath *string) error {
+_, err := n.w.Workspace.GetStatusByPath(ctx, *dirPath)
+if err != nil {
+return err
+}
+
+objects, err := n.w.Workspace.RecursiveList(ctx, *dirPath)
+if err != nil {
+return err
+}
+
+for _, obj := range objects {
+if obj.ObjectType == workspace.ObjectTypeDirectory {
+continue
+}
+
+err := n.markFileForDownload(ctx, &obj.Path)
+if err != nil {
+return err
+}
+}
+
+rel, err := filepath.Rel(n.configDir, n.sourceDir)
+if err != nil {
+return err
+}
+
+*dirPath = rel
+return nil
+}
+
func (n *downloader) markNotebookForDownload(ctx context.Context, notebookPath *string) error {
info, err := n.w.Workspace.GetStatusByPath(ctx, *notebookPath)
if err != nil {

@@ -4,7 +4,7 @@ import (
"encoding/base64"
"testing"

-"github.com/databricks/cli/internal/acc"
+"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"

View File

@@ -0,0 +1,99 @@
package bundle_test

import (
	"fmt"
	"io"
	"testing"

	"github.com/databricks/cli/integration/internal/acc"
	"github.com/databricks/cli/internal/testutil"
	"github.com/databricks/cli/libs/env"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"github.com/google/uuid"
	"github.com/stretchr/testify/require"
)

func TestDeployBundleWithApp(t *testing.T) {
	ctx, wt := acc.WorkspaceTest(t)
	uniqueId := uuid.New().String()
	appId := fmt.Sprintf("app-%s", uuid.New().String()[0:8])
	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
	instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")

	root := initTestTemplate(t, ctx, "apps", map[string]any{
		"unique_id":        uniqueId,
		"app_id":           appId,
		"node_type_id":     nodeTypeId,
		"spark_version":    defaultSparkVersion,
		"instance_pool_id": instancePoolId,
	})

	t.Cleanup(func() {
		destroyBundle(t, ctx, root)
		app, err := wt.W.Apps.Get(ctx, apps.GetAppRequest{Name: appId})
		if err != nil {
			require.ErrorContains(t, err, "does not exist")
		} else {
			require.Contains(t, []apps.ApplicationState{apps.ApplicationStateUnavailable}, app.AppStatus.State)
		}
	})

	deployBundle(t, ctx, root)

	// App should exist after bundle deployment.
	app, err := wt.W.Apps.Get(ctx, apps.GetAppRequest{Name: appId})
	require.NoError(t, err)
	require.NotNil(t, app)

	// Check the app config.
	currentUser, err := wt.W.CurrentUser.Me(ctx)
	require.NoError(t, err)

	pathToAppYml := fmt.Sprintf("/Workspace/Users/%s/.bundle/%s/files/app/app.yml", currentUser.UserName, uniqueId)
	reader, err := wt.W.Workspace.Download(ctx, pathToAppYml)
	require.NoError(t, err)

	data, err := io.ReadAll(reader)
	require.NoError(t, err)

	job, err := wt.W.Jobs.GetBySettingsName(ctx, fmt.Sprintf("test-job-with-cluster-%s", uniqueId))
	require.NoError(t, err)

	content := string(data)
	require.Contains(t, content, fmt.Sprintf(`command:
  - flask
  - --app
  - app
  - run
env:
  - name: JOB_ID
    value: "%d"`, job.JobId))

	// Try to run the app.
	_, out := runResourceWithStderr(t, ctx, root, "test_app")
	require.Contains(t, out, app.Url)

	// App should be in the running state.
	app, err = wt.W.Apps.Get(ctx, apps.GetAppRequest{Name: appId})
	require.NoError(t, err)
	require.NotNil(t, app)
	require.Equal(t, apps.ApplicationStateRunning, app.AppStatus.State)

	// Stop the app.
	wait, err := wt.W.Apps.Stop(ctx, apps.StopAppRequest{Name: appId})
	require.NoError(t, err)

	app, err = wait.Get()
	require.NoError(t, err)
	require.NotNil(t, app)
	require.Equal(t, apps.ApplicationStateUnavailable, app.AppStatus.State)

	// Try to run the app again.
	_, out = runResourceWithStderr(t, ctx, root, "test_app")
	require.Contains(t, out, app.Url)

	// App should be in the running state.
	app, err = wt.W.Apps.Get(ctx, apps.GetAppRequest{Name: appId})
	require.NoError(t, err)
	require.NotNil(t, app)
	require.Equal(t, apps.ApplicationStateRunning, app.AppStatus.State)
}

View File

@@ -12,7 +12,7 @@ import (
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/resources"
 	"github.com/databricks/cli/bundle/libraries"
-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"

View File

@@ -5,7 +5,7 @@ import (
 	"path/filepath"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/require"

View File

@@ -6,7 +6,7 @@ import (
 	"path/filepath"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"

View File

@@ -0,0 +1,24 @@
{
  "properties": {
    "unique_id": {
      "type": "string",
      "description": "Unique ID for job name"
    },
    "app_id": {
      "type": "string",
      "description": "Unique ID for app name"
    },
    "spark_version": {
      "type": "string",
      "description": "Spark version used for job cluster"
    },
    "node_type_id": {
      "type": "string",
      "description": "Node type id for job cluster"
    },
    "instance_pool_id": {
      "type": "string",
      "description": "Instance pool id for job cluster"
    }
  }
}

View File

@@ -0,0 +1,15 @@
import os

from databricks.sdk import WorkspaceClient
from flask import Flask

app = Flask(__name__)


@app.route("/")
def home():
    job_id = os.getenv("JOB_ID")

    w = WorkspaceClient()
    job = w.jobs.get(job_id)
    return job.settings.name

View File

@@ -0,0 +1,42 @@
bundle:
  name: basic

workspace:
  root_path: "~/.bundle/{{.unique_id}}"

resources:
  apps:
    test_app:
      name: "{{.app_id}}"
      description: "App which manages job created by this bundle"
      source_code_path: ./app
      config:
        command:
          - flask
          - --app
          - app
          - run
        env:
          - name: JOB_ID
            value: ${resources.jobs.foo.id}

      resources:
        - name: "app-job"
          description: "A job for app to be able to work with"
          job:
            id: ${resources.jobs.foo.id}
            permission: "CAN_MANAGE_RUN"

  jobs:
    foo:
      name: test-job-with-cluster-{{.unique_id}}
      tasks:
        - task_key: my_notebook_task
          new_cluster:
            num_workers: 1
            spark_version: "{{.spark_version}}"
            node_type_id: "{{.node_type_id}}"
            data_security_mode: USER_ISOLATION
            instance_pool_id: "{{.instance_pool_id}}"
          spark_python_task:
            python_file: ./hello_world.py
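Editor's note: the {{.unique_id}}-style placeholders above are Go template syntax, filled from the parameter map the tests pass to initTestTemplate, while ${resources.jobs.foo.id} is deliberately left for bundle interpolation at deploy time, when the job ID is actually known. A small standalone sketch, assuming the template helper behaves like the standard text/template package (which the {{.param}} syntax suggests); the parameter value is invented:

package main

import (
	"os"
	"text/template"
)

func main() {
	// A fragment of the bundle template above.
	const fragment = `bundle:
  name: basic

workspace:
  root_path: "~/.bundle/{{.unique_id}}"
`
	// Parameters corresponding to the JSON schema earlier in this diff.
	params := map[string]any{"unique_id": "1234-abcd"}

	t := template.Must(template.New("databricks.yml").Parse(fragment))
	if err := t.Execute(os.Stdout, params); err != nil {
		panic(err)
	}
}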

View File

@@ -0,0 +1 @@
print("Hello World!")

View File

@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/google/uuid"
@@ -12,12 +12,12 @@
 )

 func TestDeployBundleWithCluster(t *testing.T) {
-	ctx, wt := acc.WorkspaceTest(t)
-
-	if testutil.IsAWSCloud(wt) {
+	if testutil.GetCloud(t) == testutil.AWS {
 		t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
 	}

+	ctx, wt := acc.WorkspaceTest(t)
+
 	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
 	uniqueId := uuid.New().String()
 	root := initTestTemplate(t, ctx, "clusters", map[string]any{
@@ -44,6 +44,11 @@ func TestDeployBundleWithCluster(t *testing.T) {
 	require.NoError(t, err)
 	require.NotNil(t, cluster)

+	if testing.Short() {
+		t.Log("Skip the job run in short mode")
+		return
+	}
+
 	out, err := runResource(t, ctx, root, "foo")
 	require.NoError(t, err)
 	require.Contains(t, out, "Hello World!")
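Editor's note: the testing.Short() guards added here (and in the Python wheel and Spark JAR tests below) keep the deploy-time assertions running while skipping only the expensive job run when the suite is invoked with go test -short. The pattern in isolation, as a sketch:

package example

import "testing"

func TestDeployAndMaybeRun(t *testing.T) {
	// ... deploy the bundle and assert the resources exist (always runs) ...

	// Gate only the costly part: triggering the job and waiting for output.
	if testing.Short() {
		t.Log("Skip the job run in short mode")
		return
	}

	// ... run the job and assert on its output ...
}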

View File

@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/databricks-sdk-go/service/dashboards"
 	"github.com/databricks/databricks-sdk-go/service/workspace"

View File

@@ -11,7 +11,7 @@ import (
 	"testing"

 	"github.com/databricks/cli/cmd/root"
-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"

View File

@@ -5,7 +5,7 @@ import (
 	"path/filepath"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/assert"

View File

@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/require"

View File

@@ -7,7 +7,7 @@ import (
 	"testing"

 	"github.com/databricks/cli/bundle/deploy"
-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
 	"github.com/google/uuid"

View File

@@ -6,7 +6,7 @@ import (
 	"path/filepath"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/databricks-sdk-go/apierr"
 	"github.com/google/uuid"

View File

@@ -6,7 +6,7 @@ import (
 	"path/filepath"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/require"
 )

View File

@@ -3,7 +3,7 @@ package bundle_test
 import (
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/require"
 )

View File

@@ -9,7 +9,7 @@ import (
 	"strings"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"

View File

@@ -9,7 +9,7 @@ import (
 	"strings"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"

View File

@@ -119,6 +119,17 @@ func runResource(t testutil.TestingT, ctx context.Context, path, key string) (st
 	return stdout.String(), err
 }

+func runResourceWithStderr(t testutil.TestingT, ctx context.Context, path, key string) (string, string) {
+	ctx = env.Set(ctx, "BUNDLE_ROOT", path)
+	ctx = cmdio.NewContext(ctx, cmdio.Default())
+
+	c := testcli.NewRunner(t, ctx, "bundle", "run", key)
+	stdout, stderr, err := c.Run()
+	require.NoError(t, err)
+	return stdout.String(), stderr.String()
+}
+
 func runResourceWithParams(t testutil.TestingT, ctx context.Context, path, key string, params ...string) (string, error) {
 	ctx = env.Set(ctx, "BUNDLE_ROOT", path)
 	ctx = cmdio.NewContext(ctx, cmdio.Default())

View File

@@ -11,7 +11,7 @@ import (
 	"testing"

 	"github.com/databricks/cli/bundle/config"
-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/iamutil"

View File

@@ -10,7 +10,7 @@ import (

 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/metadata"
-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/filer"
 	"github.com/google/uuid"

View File

@@ -4,7 +4,7 @@ import (
 	"context"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/databricks-sdk-go/listing"
 	"github.com/databricks/databricks-sdk-go/service/jobs"

View File

@@ -3,7 +3,7 @@ package bundle_test
 import (
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
 	"github.com/google/uuid"
@@ -29,6 +29,11 @@ func runPythonWheelTest(t *testing.T, templateName, sparkVersion string, pythonW
 		destroyBundle(t, ctx, bundleRoot)
 	})

+	if testing.Short() {
+		t.Log("Skip the job run in short mode")
+		return
+	}
+
 	out, err := runResource(t, ctx, bundleRoot, "some_other_job")
 	require.NoError(t, err)
 	require.Contains(t, out, "Hello from my func")
@@ -51,9 +56,7 @@ func TestPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
 }

 func TestPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
-	_, wt := acc.WorkspaceTest(t)
-
-	if testutil.IsAWSCloud(wt) {
+	if testutil.GetCloud(t) == testutil.AWS {
 		t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
 	}

View File

@@ -4,7 +4,7 @@ import (
 	"context"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
 	"github.com/google/uuid"
@@ -30,6 +30,11 @@ func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, arti
 		destroyBundle(t, ctx, bundleRoot)
 	})

+	if testing.Short() {
+		t.Log("Skip the job run in short mode")
+		return
+	}
+
 	out, err := runResource(t, ctx, bundleRoot, "jar_job")
 	require.NoError(t, err)
 	require.Contains(t, out, "Hello from Jar!")

View File

@@ -6,7 +6,7 @@ import (
 	"regexp"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/databricks-sdk-go/listing"
 	"github.com/databricks/databricks-sdk-go/service/compute"

View File

@@ -7,7 +7,7 @@ import (
 	"strings"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/libs/filer"
 	"github.com/stretchr/testify/assert"

View File

@@ -5,7 +5,7 @@ import (
 	"path"
 	"path/filepath"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/filer"

View File

@@ -6,7 +6,7 @@ import (
 	"strconv"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/databricks-sdk-go"

View File

@@ -6,7 +6,7 @@ import (
 	"fmt"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/databricks-sdk-go/service/workspace"

View File

@@ -3,7 +3,7 @@ package storage_credentials_test
 import (
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/stretchr/testify/assert"

View File

@@ -15,7 +15,7 @@ import (
 	"testing"
 	"time"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/filer"

View File

@@ -11,7 +11,7 @@ import (
 	"strings"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testcli"
 	"github.com/databricks/cli/libs/filer"
 	"github.com/databricks/databricks-sdk-go/service/workspace"

View File

@@ -7,7 +7,7 @@ import (
 	"path"
 	"path/filepath"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/filer"

View File

@@ -8,7 +8,7 @@ import (
 	"strings"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/libs/dbr"
 	"github.com/databricks/cli/libs/git"
 	"github.com/stretchr/testify/assert"

View File

@@ -11,7 +11,7 @@ import (
 	"testing"
 	"time"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/filer"
 	lockpkg "github.com/databricks/cli/libs/locker"

View File

@@ -4,7 +4,7 @@ import (
 	"strings"
 	"testing"

-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/jobs"

View File

@@ -14,7 +14,7 @@ import (
 	"time"

 	"github.com/databricks/cli/bundle/run/output"
-	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/integration/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/filer"
 	"github.com/databricks/databricks-sdk-go"

View File

@@ -31,7 +31,7 @@ function cli_snapshot_directory() {
 		dir="${dir}_386"
 		;;
 	arm64|aarch64)
-		dir="${dir}_arm64"
+		dir="${dir}_arm64_v8.0"
 		;;
 	armv7l|armv8l)
 		dir="${dir}_arm_6"

View File

@@ -58,7 +58,3 @@ func GetCloud(t TestingT) Cloud {
 	}
 	return -1
 }
-
-func IsAWSCloud(t TestingT) bool {
-	return GetCloud(t) == AWS
-}

View File

@@ -23,7 +23,7 @@ func RandomName(prefix ...string) string {
 	randLen := 12
 	b := make([]byte, randLen)
 	for i := range b {
-		b[i] = charset[rand.Intn(randLen)]
+		b[i] = charset[rand.Intn(len(charset))]
 	}
 	if len(prefix) > 0 {
 		return fmt.Sprintf("%s%s", strings.Join(prefix, ""), b)
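Editor's note: this one-character fix matters more than it looks. With rand.Intn(randLen), the index ranges over 0..11 only, so every generated name is drawn from just the first 12 characters of the alphabet. A runnable demonstration; the charset value below is an assumption for illustration, since the real constant sits outside this hunk:

package main

import (
	"fmt"
	"math/rand"
)

// Assumed alphabet; the actual charset constant is not shown in the diff.
const charset = "abcdefghijklmnopqrstuvwxyz0123456789"

func main() {
	randLen := 12
	b := make([]byte, randLen)

	// Buggy version: only charset[0:12] ("a" through "l") can ever appear.
	for i := range b {
		b[i] = charset[rand.Intn(randLen)]
	}
	fmt.Println("buggy:", string(b))

	// Fixed version: indexes over the entire alphabet.
	for i := range b {
		b[i] = charset[rand.Intn(len(charset))]
	}
	fmt.Println("fixed:", string(b))
}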

View File

@@ -7,7 +7,7 @@ type elementsByKey struct {
 	keyFunc func(dyn.Value) string
 }

-func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
+func (e elementsByKey) doMap(_ dyn.Path, v dyn.Value, mergeFunc func(a, b dyn.Value) (dyn.Value, error)) (dyn.Value, error) {
 	// We know the type of this value is a sequence.
 	// For additional defence, return self if it is not.
 	elements, ok := v.AsSequence()
@@ -33,7 +33,7 @@ func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
 		}

 		// Merge this instance into the reference.
-		nv, err := Merge(ref, elements[i])
+		nv, err := mergeFunc(ref, elements[i])
 		if err != nil {
 			return v, err
 		}
@@ -55,6 +55,26 @@ func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
 	return dyn.NewValue(out, v.Locations()), nil
 }

+func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
+	return e.doMap(nil, v, Merge)
+}
+
+func (e elementsByKey) MapWithOverride(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+	return e.doMap(nil, v, func(a, b dyn.Value) (dyn.Value, error) {
+		return Override(a, b, OverrideVisitor{
+			VisitInsert: func(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
+				return v, nil
+			},
+			VisitDelete: func(valuePath dyn.Path, left dyn.Value) error {
+				return nil
+			},
+			VisitUpdate: func(_ dyn.Path, a, b dyn.Value) (dyn.Value, error) {
+				return b, nil
+			},
+		})
+	})
+}
+
 // ElementsByKey returns a [dyn.MapFunc] that operates on a sequence
 // where each element is a map. It groups elements by a key and merges
 // elements with the same key.
@@ -65,3 +85,7 @@ func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
 func ElementsByKey(key string, keyFunc func(dyn.Value) string) dyn.MapFunc {
 	return elementsByKey{key, keyFunc}.Map
 }
+
+func ElementsByKeyWithOverride(key string, keyFunc func(dyn.Value) string) dyn.MapFunc {
+	return elementsByKey{key, keyFunc}.MapWithOverride
+}
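Editor's note: the practical difference between the two entry points is the duplicate-combining strategy. ElementsByKey deep-merges elements that share a key, while ElementsByKeyWithOverride lets the later element win outright (its OverrideVisitor accepts every insert, delete, and update). A dependency-free sketch of the two strategies over plain maps, with the dyn.Value machinery elided:

package main

import "fmt"

// combineByKey groups elements by their "name" field and folds duplicates
// with the supplied strategy, preserving first-seen order (the same
// outline as elementsByKey.doMap above).
func combineByKey(elems []map[string]any, combine func(a, b map[string]any) map[string]any) []map[string]any {
	index := map[string]int{}
	var out []map[string]any
	for _, e := range elems {
		key := e["name"].(string)
		if i, ok := index[key]; ok {
			out[i] = combine(out[i], e)
			continue
		}
		index[key] = len(out)
		out = append(out, e)
	}
	return out
}

// deepMerge combines nested maps key by key, loosely like Merge.
func deepMerge(a, b map[string]any) map[string]any {
	for k, v := range b {
		if bv, ok := v.(map[string]any); ok {
			if av, ok := a[k].(map[string]any); ok {
				a[k] = deepMerge(av, bv)
				continue
			}
		}
		a[k] = v
	}
	return a
}

// overrideRight lets the later element win wholesale, mirroring the
// OverrideVisitor above that always returns the new value.
func overrideRight(a, b map[string]any) map[string]any {
	for k, v := range b {
		a[k] = v
	}
	return a
}

func main() {
	elems := func() []map[string]any {
		return []map[string]any{
			{"name": "env", "config": map[string]any{"A": "1", "B": "2"}},
			{"name": "env", "config": map[string]any{"B": "3"}},
		}
	}

	// Merge keeps A and updates B: config ends up {A:1, B:3}.
	fmt.Println(combineByKey(elems(), deepMerge))

	// Override replaces config wholesale: it ends up {B:3}.
	fmt.Println(combineByKey(elems(), overrideRight))
}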

Some files were not shown because too many files have changed in this diff.