Merge remote-tracking branch 'databricks/main' into cp-better-errors

Lennart Kats 2024-07-06 22:01:43 +02:00
commit b384b36066
193 changed files with 6586 additions and 1289 deletions


@@ -1 +1 @@
-7eb5ad9a2ed3e3f1055968a2d1014ac92c06fe92
+7437dabb9dadee402c1fc060df4c1ce8cc5369f0


@@ -7,7 +7,7 @@ package account
 import (
 	"github.com/databricks/cli/cmd/root"
 	"github.com/spf13/cobra"
-	{{range .Services}}{{if and .IsAccounts (not .HasParent)}}{{if not (in $excludes .KebabName) }}
+	{{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) }}
 	{{.SnakeName}} "github.com/databricks/cli/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}}
 )
@@ -17,7 +17,7 @@ func New() *cobra.Command {
 		Short: `Databricks Account Commands`,
 	}
-	{{range .Services}}{{if and .IsAccounts (not .HasParent)}}{{if not (in $excludes .KebabName) -}}
+	{{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) -}}
 	cmd.AddCommand({{.SnakeName}}.New())
 	{{end}}{{end}}{{end}}


@@ -14,14 +14,14 @@ package workspace
 import (
 	"github.com/databricks/cli/cmd/root"
-	{{range .Services}}{{if and (not .IsAccounts) (not .HasParent)}}{{if not (in $excludes .KebabName) }}
+	{{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) }}
 	{{.SnakeName}} "github.com/databricks/cli/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}}
 )
 func All() []*cobra.Command {
 	var out []*cobra.Command
-	{{range .Services}}{{if and (not .IsAccounts) (not .HasParent)}}{{if not (in $excludes .KebabName) -}}
+	{{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) -}}
 	out = append(out, {{.SnakeName}}.New())
 	{{end}}{{end}}{{end}}


@@ -22,6 +22,7 @@ import (
 	"dbsql-permissions"
 	"account-access-control-proxy"
 	"files"
+	"serving-endpoints-data-plane"
 }}
 {{if not (in $excludes .KebabName) }}


@@ -33,7 +33,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.21.x
+          go-version: 1.22.x
      - name: Setup Python
        uses: actions/setup-python@v5
@@ -68,7 +68,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.21.x
+          go-version: 1.22.x
          # No need to download cached dependencies when running gofmt.
          cache: false
@@ -100,7 +100,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.21.x
+          go-version: 1.22.x
      # Github repo: https://github.com/ajv-validator/ajv-cli
      - name: Install ajv-cli


@@ -21,33 +21,41 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.21.x
+          go-version: 1.22.x
+          # The default cache key for this action considers only the `go.sum` file.
+          # We include .goreleaser.yaml here to differentiate from the cache used by the push action
+          # that runs unit tests. This job produces and uses a different cache.
+          cache-dependency-path: |
+            go.sum
+            .goreleaser.yaml
       - name: Hide snapshot tag to outsmart GoReleaser
         run: git tag -d snapshot || true
       - name: Run GoReleaser
-        uses: goreleaser/goreleaser-action@v4
+        id: releaser
+        uses: goreleaser/goreleaser-action@v6
         with:
-          version: latest
+          version: ~> v2
           args: release --snapshot --skip docker
       - name: Upload macOS binaries
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: cli_darwin_snapshot
           path: |
             dist/*_darwin_*/
       - name: Upload Linux binaries
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: cli_linux_snapshot
           path: |
             dist/*_linux_*/
       - name: Upload Windows binaries
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
           name: cli_windows_snapshot
           path: |


@@ -22,7 +22,14 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.21.x
+          go-version: 1.22.x
+          # The default cache key for this action considers only the `go.sum` file.
+          # We include .goreleaser.yaml here to differentiate from the cache used by the push action
+          # that runs unit tests. This job produces and uses a different cache.
+          cache-dependency-path: |
+            go.sum
+            .goreleaser.yaml
       # Log into the GitHub Container Registry. The goreleaser action will create
       # the docker images and push them to the GitHub Container Registry.
@@ -39,9 +46,9 @@ jobs:
       - name: Run GoReleaser
         id: releaser
-        uses: goreleaser/goreleaser-action@v4
+        uses: goreleaser/goreleaser-action@v6
         with:
-          version: latest
+          version: ~> v2
           args: release
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,6 +1,9 @@
+version: 2
+
 before:
   hooks:
-    - go mod tidy
+    - go mod download
 builds:
   - env:
       - CGO_ENABLED=0
@@ -36,6 +39,7 @@ builds:
       - amd64
       - arm64
     binary: databricks
 archives:
   - format: zip
@@ -89,8 +93,10 @@ docker_manifests:
 checksum:
   name_template: 'databricks_cli_{{ .Version }}_SHA256SUMS'
   algorithm: sha256
 snapshot:
   name_template: '{{ incpatch .Version }}-dev+{{ .ShortCommit }}'
 changelog:
   sort: asc
   filters:


@@ -1,5 +1,127 @@
# Version changelog
## 0.223.1
This bugfix release fixes missing error messages in v0.223.0.
CLI:
* Fix logic error in [#1532](https://github.com/databricks/cli/pull/1532) ([#1564](https://github.com/databricks/cli/pull/1564)).
## 0.223.0
Bundles:
As of this release you can interact with bundles when running the CLI on DBR (e.g. via the Web Terminal).
* Fix non-default project names not working in dbt-sql template ([#1500](https://github.com/databricks/cli/pull/1500)).
* Improve `bundle validate` output ([#1532](https://github.com/databricks/cli/pull/1532)).
* Fixed resolving variable references inside slice variable ([#1550](https://github.com/databricks/cli/pull/1550)).
* Fixed bundle not loading when empty variable is defined ([#1552](https://github.com/databricks/cli/pull/1552)).
* Use `vfs.Path` for filesystem interaction ([#1554](https://github.com/databricks/cli/pull/1554)).
* Replace `vfs.Path` with extension-aware filer when running on DBR ([#1556](https://github.com/databricks/cli/pull/1556)).
Internal:
* merge.Override: Fix handling of dyn.NilValue ([#1530](https://github.com/databricks/cli/pull/1530)).
* Compare `.Kind()` instead of direct equality checks on a `dyn.Value` ([#1520](https://github.com/databricks/cli/pull/1520)).
* PythonMutator: register product in user agent extra ([#1533](https://github.com/databricks/cli/pull/1533)).
* Ignore `dyn.NilValue` when traversing value from `dyn.Map` ([#1547](https://github.com/databricks/cli/pull/1547)).
* Add extra tests for the sync block ([#1548](https://github.com/databricks/cli/pull/1548)).
* PythonMutator: add diagnostics ([#1531](https://github.com/databricks/cli/pull/1531)).
* PythonMutator: support omitempty in PyDABs ([#1513](https://github.com/databricks/cli/pull/1513)).
* PythonMutator: allow insert 'resources' and 'resources.jobs' ([#1555](https://github.com/databricks/cli/pull/1555)).
## 0.222.0
CLI:
* Add link to documentation for Homebrew installation to README ([#1505](https://github.com/databricks/cli/pull/1505)).
* Fix `databricks configure` to use `DATABRICKS_CONFIG_FILE` environment variable if exists as config file ([#1325](https://github.com/databricks/cli/pull/1325)).
Bundles:
The Terraform upgrade to v1.48.0 includes a fix for library order not being respected.
* Fix conditional in query in `default-sql` template ([#1479](https://github.com/databricks/cli/pull/1479)).
* Remove user credentials specified in the Git origin URL ([#1494](https://github.com/databricks/cli/pull/1494)).
* Serialize dynamic value for `bundle validate` output ([#1499](https://github.com/databricks/cli/pull/1499)).
* Override variables with lookup value even if the variable has a default value set ([#1504](https://github.com/databricks/cli/pull/1504)).
* Pause quality monitors when "mode: development" is used ([#1481](https://github.com/databricks/cli/pull/1481)).
* Return `fs.ModeDir` for Git folders in the workspace ([#1521](https://github.com/databricks/cli/pull/1521)).
* Upgrade TF provider to 1.48.0 ([#1527](https://github.com/databricks/cli/pull/1527)).
* Added support for complex variables ([#1467](https://github.com/databricks/cli/pull/1467)).
Internal:
* Add randIntn function ([#1475](https://github.com/databricks/cli/pull/1475)).
* Avoid multiple file tree traversals on bundle deploy ([#1493](https://github.com/databricks/cli/pull/1493)).
* Clean up unused code ([#1502](https://github.com/databricks/cli/pull/1502)).
* Use `dyn.InvalidValue` to indicate absence ([#1507](https://github.com/databricks/cli/pull/1507)).
* Add ApplyPythonMutator ([#1430](https://github.com/databricks/cli/pull/1430)).
* Set bool pointer to disable lock ([#1516](https://github.com/databricks/cli/pull/1516)).
* Allow the any type to be set to nil in `convert.FromTyped` ([#1518](https://github.com/databricks/cli/pull/1518)).
* Properly deal with nil values in `convert.FromTyped` ([#1511](https://github.com/databricks/cli/pull/1511)).
* Return `dyn.InvalidValue` instead of `dyn.NilValue` when errors happen ([#1514](https://github.com/databricks/cli/pull/1514)).
* PythonMutator: replace stdin/stdout with files ([#1512](https://github.com/databricks/cli/pull/1512)).
* Add context type and value to path rewriting ([#1525](https://github.com/databricks/cli/pull/1525)).
API Changes:
* Added schedule CRUD commands to `databricks lakeview`.
* Added subscription CRUD commands to `databricks lakeview`.
* Added `databricks apps start` command.
OpenAPI commit 7437dabb9dadee402c1fc060df4c1ce8cc5369f0 (2024-06-24)
Dependency updates:
* Bump golang.org/x/text from 0.15.0 to 0.16.0 ([#1482](https://github.com/databricks/cli/pull/1482)).
* Bump golang.org/x/term from 0.20.0 to 0.21.0 ([#1483](https://github.com/databricks/cli/pull/1483)).
* Bump golang.org/x/mod from 0.17.0 to 0.18.0 ([#1484](https://github.com/databricks/cli/pull/1484)).
* Bump golang.org/x/oauth2 from 0.20.0 to 0.21.0 ([#1485](https://github.com/databricks/cli/pull/1485)).
* Bump github.com/briandowns/spinner from 1.23.0 to 1.23.1 ([#1495](https://github.com/databricks/cli/pull/1495)).
* Bump github.com/spf13/cobra from 1.8.0 to 1.8.1 ([#1496](https://github.com/databricks/cli/pull/1496)).
* Bump github.com/databricks/databricks-sdk-go from 0.42.0 to 0.43.0 ([#1522](https://github.com/databricks/cli/pull/1522)).
## 0.221.1
Bundles:
This release fixes an issue introduced in v0.221.0 where managing jobs with a single-node cluster would fail.
* Fix SQL schema selection in default-sql template ([#1471](https://github.com/databricks/cli/pull/1471)).
* Copy-editing for SQL templates ([#1474](https://github.com/databricks/cli/pull/1474)).
* Upgrade TF provider to 1.47.0 ([#1476](https://github.com/databricks/cli/pull/1476)).
Internal:
* Use latest version of goreleaser action ([#1477](https://github.com/databricks/cli/pull/1477)).
## 0.221.0
CLI:
* Update OpenAPI spec ([#1466](https://github.com/databricks/cli/pull/1466)).
Bundles:
* Upgrade TF provider to 1.46.0 ([#1460](https://github.com/databricks/cli/pull/1460)).
* Add support for Lakehouse monitoring ([#1307](https://github.com/databricks/cli/pull/1307)).
* Make dbt-sql and default-sql templates public ([#1463](https://github.com/databricks/cli/pull/1463)).
Internal:
* Abstract over filesystem interaction with libs/vfs ([#1452](https://github.com/databricks/cli/pull/1452)).
* Add `filer.Filer` to read notebooks from WSFS without omitting their extension ([#1457](https://github.com/databricks/cli/pull/1457)).
* Fix listing notebooks in a subdirectory ([#1468](https://github.com/databricks/cli/pull/1468)).
API Changes:
* Changed `databricks account storage-credentials list` command to return .
* Added `databricks consumer-listings batch-get` command.
* Added `databricks consumer-providers batch-get` command.
* Removed `databricks apps create-deployment` command.
* Added `databricks apps deploy` command.
OpenAPI commit 37b925eba37dfb3d7e05b6ba2d458454ce62d3a0 (2024-06-03)
Dependency updates:
* Bump github.com/hashicorp/go-version from 1.6.0 to 1.7.0 ([#1454](https://github.com/databricks/cli/pull/1454)).
* Bump github.com/hashicorp/hc-install from 0.6.4 to 0.7.0 ([#1453](https://github.com/databricks/cli/pull/1453)).
## 0.220.0
CLI:


@@ -4,18 +4,21 @@
 This project is in Public Preview.
 
 Documentation about the full REST API coverage is available in the [docs folder](docs/commands.md).
 
 Documentation is available at https://docs.databricks.com/dev-tools/cli/databricks-cli.html.
 
 ## Installation
 
 This CLI is packaged as a dependency-free binary executable and may be located in any directory.
 See https://github.com/databricks/cli/releases for releases and
-[the docs pages](https://docs.databricks.com/dev-tools/cli/databricks-cli.html) for the
-installation instructions.
+the [Databricks documentation](https://docs.databricks.com/en/dev-tools/cli/install.html) for detailed information about installing the CLI.
 
 ------
+### Homebrew
+
+We maintain a [Homebrew tap](https://github.com/databricks/homebrew-tap) for installing the Databricks CLI. You can find instructions for how to install, upgrade and downgrade the CLI using Homebrew [here](https://github.com/databricks/homebrew-tap/blob/main/README.md).
+
+------
+### Docker
 You can use the CLI via a Docker image by pulling the image from `ghcr.io`. You can find all available versions
 at: https://github.com/databricks/cli/pkgs/container/cli.
 ```


@@ -12,6 +12,7 @@ import (
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/artifacts/whl"
 	"github.com/databricks/cli/bundle/config"
+	"github.com/databricks/cli/bundle/config/resources"
 	"github.com/databricks/cli/libs/cmdio"
 	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/filer"
@@ -135,36 +136,57 @@ func uploadArtifact(ctx context.Context, b *bundle.Bundle, a *config.Artifact, u
 		remotePath := path.Join(wsfsBase, f.RemotePath)
 
 		for _, job := range b.Config.Resources.Jobs {
-			for i := range job.Tasks {
-				task := &job.Tasks[i]
-				for j := range task.Libraries {
-					lib := &task.Libraries[j]
-					if lib.Whl != "" && isArtifactMatchLibrary(f, lib.Whl, b) {
-						lib.Whl = remotePath
-					}
-					if lib.Jar != "" && isArtifactMatchLibrary(f, lib.Jar, b) {
-						lib.Jar = remotePath
-					}
-				}
-			}
-
-			for i := range job.Environments {
-				env := &job.Environments[i]
-				if env.Spec == nil {
-					continue
-				}
-
-				for j := range env.Spec.Dependencies {
-					lib := env.Spec.Dependencies[j]
-					if isArtifactMatchLibrary(f, lib, b) {
-						env.Spec.Dependencies[j] = remotePath
-					}
-				}
-			}
+			rewriteArtifactPath(b, f, job, remotePath)
 		}
 	}
 
 	return nil
 }
 
+func rewriteArtifactPath(b *bundle.Bundle, f *config.ArtifactFile, job *resources.Job, remotePath string) {
+	// Rewrite artifact path in job task libraries
+	for i := range job.Tasks {
+		task := &job.Tasks[i]
+		for j := range task.Libraries {
+			lib := &task.Libraries[j]
+			if lib.Whl != "" && isArtifactMatchLibrary(f, lib.Whl, b) {
+				lib.Whl = remotePath
+			}
+			if lib.Jar != "" && isArtifactMatchLibrary(f, lib.Jar, b) {
+				lib.Jar = remotePath
+			}
+		}
+
+		// Rewrite artifact path in job task libraries for ForEachTask
+		if task.ForEachTask != nil {
+			forEachTask := task.ForEachTask
+			for j := range forEachTask.Task.Libraries {
+				lib := &forEachTask.Task.Libraries[j]
+				if lib.Whl != "" && isArtifactMatchLibrary(f, lib.Whl, b) {
+					lib.Whl = remotePath
+				}
+				if lib.Jar != "" && isArtifactMatchLibrary(f, lib.Jar, b) {
+					lib.Jar = remotePath
+				}
+			}
+		}
+	}
+
+	// Rewrite artifact path in job environments
+	for i := range job.Environments {
+		env := &job.Environments[i]
+		if env.Spec == nil {
+			continue
+		}
+
+		for j := range env.Spec.Dependencies {
+			lib := env.Spec.Dependencies[j]
+			if isArtifactMatchLibrary(f, lib, b) {
+				env.Spec.Dependencies[j] = remotePath
+			}
+		}
+	}
+}
 
 func isArtifactMatchLibrary(f *config.ArtifactFile, libPath string, b *bundle.Bundle) bool {


@@ -52,6 +52,20 @@ func TestArtifactUpload(t *testing.T) {
 					},
 				},
 			},
+			{
+				ForEachTask: &jobs.ForEachTask{
+					Task: jobs.Task{
+						Libraries: []compute.Library{
+							{
+								Whl: filepath.Join("whl", "*.whl"),
+							},
+							{
+								Whl: "/Workspace/Users/foo@bar.com/mywheel.whl",
+							},
+						},
+					},
+				},
+			},
 		},
 		Environments: []jobs.JobEnvironment{
 			{
@@ -88,4 +102,6 @@ func TestArtifactUpload(t *testing.T) {
 	require.Equal(t, "/Workspace/Users/foo@bar.com/mywheel.whl", b.Config.Resources.Jobs["job"].JobSettings.Tasks[0].Libraries[1].Whl)
 	require.Equal(t, "/Workspace/foo/bar/artifacts/source.whl", b.Config.Resources.Jobs["job"].JobSettings.Environments[0].Spec.Dependencies[0])
 	require.Equal(t, "/Workspace/Users/foo@bar.com/mywheel.whl", b.Config.Resources.Jobs["job"].JobSettings.Environments[0].Spec.Dependencies[1])
+	require.Equal(t, "/Workspace/foo/bar/artifacts/source.whl", b.Config.Resources.Jobs["job"].JobSettings.Tasks[1].ForEachTask.Task.Libraries[0].Whl)
+	require.Equal(t, "/Workspace/Users/foo@bar.com/mywheel.whl", b.Config.Resources.Jobs["job"].JobSettings.Tasks[1].ForEachTask.Task.Libraries[1].Whl)
 }


@@ -16,7 +16,7 @@ import (
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/env"
 	"github.com/databricks/cli/bundle/metadata"
-	"github.com/databricks/cli/libs/folders"
+	"github.com/databricks/cli/libs/fileset"
 	"github.com/databricks/cli/libs/git"
 	"github.com/databricks/cli/libs/locker"
 	"github.com/databricks/cli/libs/log"
@@ -35,6 +35,10 @@ type Bundle struct {
 	// It is set when we instantiate a new bundle instance.
 	RootPath string
 
+	// BundleRoot is a virtual filesystem path to the root of the bundle.
+	// Exclusively use this field for filesystem operations.
+	BundleRoot vfs.Path
+
 	Config config.Root
 
 	// Metadata about the bundle deployment. This is the interface Databricks services
@@ -50,6 +54,9 @@ type Bundle struct {
 	clientOnce sync.Once
 	client     *databricks.WorkspaceClient
 
+	// Files that are synced to the workspace.file_path
+	Files []fileset.File
+
 	// Stores an initialized copy of this bundle's Terraform wrapper.
 	Terraform *tfexec.Terraform
@@ -69,7 +76,8 @@ type Bundle struct {
 func Load(ctx context.Context, path string) (*Bundle, error) {
 	b := &Bundle{
 		RootPath: filepath.Clean(path),
+		BundleRoot: vfs.MustNew(path),
 	}
 	configFile, err := config.FileNames.FindInPath(path)
 	if err != nil {
@@ -204,12 +212,12 @@ func (b *Bundle) GetSyncIncludePatterns(ctx context.Context) ([]string, error) {
 }
 
 func (b *Bundle) GitRepository() (*git.Repository, error) {
-	rootPath, err := folders.FindDirWithLeaf(b.RootPath, ".git")
+	_, err := vfs.FindLeafInTree(b.BundleRoot, ".git")
 	if err != nil {
 		return nil, fmt.Errorf("unable to locate repository root: %w", err)
 	}
 
-	return git.NewRepository(vfs.MustNew(rootPath))
+	return git.NewRepository(b.BundleRoot)
 }
 
 // AuthEnv returns a map with environment variables and their values


@@ -4,6 +4,7 @@ import (
 	"context"
 
 	"github.com/databricks/cli/bundle/config"
+	"github.com/databricks/cli/libs/vfs"
 	"github.com/databricks/databricks-sdk-go"
 )
@@ -23,6 +24,10 @@ func (r ReadOnlyBundle) RootPath() string {
 	return r.b.RootPath
 }
 
+func (r ReadOnlyBundle) BundleRoot() vfs.Path {
+	return r.b.BundleRoot
+}
+
 func (r ReadOnlyBundle) WorkspaceClient() *databricks.WorkspaceClient {
 	return r.b.WorkspaceClient()
 }


@@ -2,6 +2,8 @@ package bundle
 
 import (
 	"context"
+	"errors"
+	"io/fs"
 	"os"
 	"path/filepath"
 	"testing"
@@ -14,7 +16,7 @@ import (
 func TestLoadNotExists(t *testing.T) {
 	b, err := Load(context.Background(), "/doesntexist")
-	assert.True(t, os.IsNotExist(err))
+	assert.True(t, errors.Is(err, fs.ErrNotExist))
 	assert.Nil(t, b)
 }


@@ -23,6 +23,22 @@ type Experimental struct {
 	// be removed in the future once we have a proper workaround like allowing IS_OWNER
 	// as a top-level permission in the DAB.
 	UseLegacyRunAs bool `json:"use_legacy_run_as,omitempty"`
+
+	// PyDABs determines whether to load the 'databricks-pydabs' package.
+	//
+	// PyDABs allows to define bundle configuration using Python.
+	PyDABs PyDABs `json:"pydabs,omitempty"`
 }
 
+type PyDABs struct {
+	// Enabled is a flag to enable the feature.
+	Enabled bool `json:"enabled,omitempty"`
+
+	// VEnvPath is path to the virtual environment.
+	//
+	// Required if PyDABs is enabled. PyDABs will load the code in the specified
+	// environment.
+	VEnvPath string `json:"venv_path,omitempty"`
+}
+
 type Command string
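
These settings surface under the `experimental.pydabs` block of databricks.yml. A minimal sketch of such a configuration, mirroring the test fixtures later in this commit (the `.venv` path is only an example):

experimental:
  pydabs:
    enabled: true
    venv_path: .venv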


@@ -17,7 +17,7 @@ func ConvertJobToValue(job *jobs.Job) (dyn.Value, error) {
 	for _, task := range job.Settings.Tasks {
 		v, err := convertTaskToValue(task, taskOrder)
 		if err != nil {
-			return dyn.NilValue, err
+			return dyn.InvalidValue, err
 		}
 		tasks = append(tasks, v)
 	}


@@ -0,0 +1,50 @@
package mutator
import (
"context"
"strings"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/vfs"
)
const envDatabricksRuntimeVersion = "DATABRICKS_RUNTIME_VERSION"
type configureWSFS struct{}
func ConfigureWSFS() bundle.Mutator {
return &configureWSFS{}
}
func (m *configureWSFS) Name() string {
return "ConfigureWSFS"
}
func (m *configureWSFS) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
root := b.BundleRoot.Native()
// The bundle root must be located in /Workspace/
if !strings.HasPrefix(root, "/Workspace/") {
return nil
}
// The executable must be running on DBR.
if _, ok := env.Lookup(ctx, envDatabricksRuntimeVersion); !ok {
return nil
}
// If so, swap out vfs.Path instance of the sync root with one that
// makes all Workspace File System interactions extension aware.
p, err := vfs.NewFilerPath(ctx, root, func(path string) (filer.Filer, error) {
return filer.NewWorkspaceFilesExtensionsClient(b.WorkspaceClient(), path)
})
if err != nil {
return diag.FromErr(err)
}
b.BundleRoot = p
return nil
}


@@ -32,18 +32,18 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) dia
 		targets := v.Get("targets")
 
 		// Return an error if both "environments" and "targets" are set.
-		if environments != dyn.NilValue && targets != dyn.NilValue {
-			return dyn.NilValue, fmt.Errorf(
+		if environments.Kind() != dyn.KindInvalid && targets.Kind() != dyn.KindInvalid {
+			return dyn.InvalidValue, fmt.Errorf(
 				"both 'environments' and 'targets' are specified; only 'targets' should be used: %s",
 				environments.Location().String(),
 			)
 		}
 
 		// Rewrite "environments" to "targets".
-		if environments != dyn.NilValue && targets == dyn.NilValue {
+		if environments.Kind() != dyn.KindInvalid && targets.Kind() == dyn.KindInvalid {
 			nv, err := dyn.Set(v, "targets", environments)
 			if err != nil {
-				return dyn.NilValue, err
+				return dyn.InvalidValue, err
 			}
 
 			// Drop the "environments" key.
 			return dyn.Walk(nv, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {


@@ -8,7 +8,6 @@ import (
 	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/git"
 	"github.com/databricks/cli/libs/log"
-	"github.com/databricks/cli/libs/vfs"
 )
 
 type loadGitDetails struct{}
@@ -23,7 +22,7 @@ func (m *loadGitDetails) Name() string {
 func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	// Load relevant git repository
-	repo, err := git.NewRepository(vfs.MustNew(b.RootPath))
+	repo, err := git.NewRepository(b.BundleRoot)
 	if err != nil {
 		return diag.FromErr(err)
 	}


@@ -21,7 +21,7 @@ func (m *mergeJobClusters) Name() string {
 func (m *mergeJobClusters) jobClusterKey(v dyn.Value) string {
 	switch v.Kind() {
-	case dyn.KindNil:
+	case dyn.KindInvalid, dyn.KindNil:
 		return ""
 	case dyn.KindString:
 		return v.MustString()
@@ -32,7 +32,7 @@ func (m *mergeJobClusters) jobClusterKey(v dyn.Value) string {
 func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
-		if v == dyn.NilValue {
+		if v.Kind() == dyn.KindNil {
 			return v, nil
 		}


@@ -21,7 +21,7 @@ func (m *mergeJobTasks) Name() string {
 func (m *mergeJobTasks) taskKeyString(v dyn.Value) string {
 	switch v.Kind() {
-	case dyn.KindNil:
+	case dyn.KindInvalid, dyn.KindNil:
 		return ""
 	case dyn.KindString:
 		return v.MustString()
@@ -32,7 +32,7 @@ func (m *mergeJobTasks) taskKeyString(v dyn.Value) string {
 func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
-		if v == dyn.NilValue {
+		if v.Kind() == dyn.KindNil {
 			return v, nil
 		}


@@ -22,7 +22,7 @@ func (m *mergePipelineClusters) Name() string {
 func (m *mergePipelineClusters) clusterLabel(v dyn.Value) string {
 	switch v.Kind() {
-	case dyn.KindNil:
+	case dyn.KindInvalid, dyn.KindNil:
 		// Note: the cluster label is optional and defaults to 'default'.
 		// We therefore ALSO merge all clusters without a label.
 		return "default"
@@ -35,7 +35,7 @@ func (m *mergePipelineClusters) clusterLabel(v dyn.Value) string {
 func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
-		if v == dyn.NilValue {
+		if v.Kind() == dyn.KindNil {
 			return v, nil
 		}


@@ -4,6 +4,7 @@ import (
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/loader"
+	pythonmutator "github.com/databricks/cli/bundle/config/mutator/python"
 	"github.com/databricks/cli/bundle/scripts"
 )
@@ -24,12 +25,6 @@ func DefaultMutators() []bundle.Mutator {
 		InitializeVariables(),
 		DefineDefaultTarget(),
 		LoadGitDetails(),
+		pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseLoad),
 	}
 }
-
-func DefaultMutatorsForTarget(target string) []bundle.Mutator {
-	return append(
-		DefaultMutators(),
-		SelectTarget(target),
-	)
-}


@@ -9,8 +9,8 @@ import (
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/libs/auth"
 	"github.com/databricks/cli/libs/diag"
-	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/log"
+	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 	"github.com/databricks/databricks-sdk-go/service/ml"
 )
@@ -33,10 +33,8 @@ func (m *processTargetMode) Name() string {
 func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	if !b.Config.Bundle.Deployment.Lock.IsExplicitlyEnabled() {
 		log.Infof(ctx, "Development mode: disabling deployment lock since bundle.deployment.lock.enabled is not set to true")
-		err := disableDeploymentLock(b)
-		if err != nil {
-			return diag.FromErr(err)
-		}
+		disabled := false
+		b.Config.Bundle.Deployment.Lock.Enabled = &disabled
 	}
 
 	r := b.Config.Resources
@@ -105,15 +103,16 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) diag.Diagno
 		// (registered models in Unity Catalog don't yet support tags)
 	}
 
-	return nil
-}
+	for i := range r.QualityMonitors {
+		// Remove all schedules from monitors, since they don't support pausing/unpausing.
+		// Quality monitors might support the "pause" property in the future, so at the
+		// CLI level we do respect that property if it is set to "unpaused".
+		if r.QualityMonitors[i].Schedule != nil && r.QualityMonitors[i].Schedule.PauseStatus != catalog.MonitorCronSchedulePauseStatusUnpaused {
+			r.QualityMonitors[i].Schedule = nil
+		}
+	}
 
-func disableDeploymentLock(b *bundle.Bundle) error {
-	return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
-		return dyn.Map(v, "bundle.deployment.lock", func(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
-			return dyn.Set(v, "enabled", dyn.V(false))
-		})
-	})
+	return nil
 }
 
 func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {


@@ -99,6 +99,20 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
 				},
 			},
 			QualityMonitors: map[string]*resources.QualityMonitor{
 				"qualityMonitor1": {CreateMonitor: &catalog.CreateMonitor{TableName: "qualityMonitor1"}},
+				"qualityMonitor2": {
+					CreateMonitor: &catalog.CreateMonitor{
+						TableName: "qualityMonitor2",
+						Schedule:  &catalog.MonitorCronSchedule{},
+					},
+				},
+				"qualityMonitor3": {
+					CreateMonitor: &catalog.CreateMonitor{
+						TableName: "qualityMonitor3",
+						Schedule: &catalog.MonitorCronSchedule{
+							PauseStatus: catalog.MonitorCronSchedulePauseStatusUnpaused,
+						},
+					},
+				},
 			},
 		},
 	},
@@ -151,6 +165,8 @@ func TestProcessTargetModeDevelopment(t *testing.T) {
 	// Quality Monitor 1
 	assert.Equal(t, "qualityMonitor1", b.Config.Resources.QualityMonitors["qualityMonitor1"].TableName)
+	assert.Nil(t, b.Config.Resources.QualityMonitors["qualityMonitor2"].Schedule)
+	assert.Equal(t, catalog.MonitorCronSchedulePauseStatusUnpaused, b.Config.Resources.QualityMonitors["qualityMonitor3"].Schedule.PauseStatus)
 }
 
 func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) {
@@ -314,7 +330,7 @@ func TestDisableLocking(t *testing.T) {
 	ctx := context.Background()
 	b := mockBundle(config.Development)
 
-	err := transformDevelopmentMode(ctx, b)
+	err := bundle.Apply(ctx, b, ProcessTargetMode())
 	require.Nil(t, err)
 	assert.False(t, b.Config.Bundle.Deployment.Lock.IsEnabled())
 }
@@ -325,7 +341,7 @@ func TestDisableLockingDisabled(t *testing.T) {
 	explicitlyEnabled := true
 	b.Config.Bundle.Deployment.Lock.Enabled = &explicitlyEnabled
 
-	err := transformDevelopmentMode(ctx, b)
+	err := bundle.Apply(ctx, b, ProcessTargetMode())
 	require.Nil(t, err)
 	assert.True(t, b.Config.Bundle.Deployment.Lock.IsEnabled(), "Deployment lock should remain enabled in development mode when explicitly enabled")
 }


@@ -0,0 +1,42 @@
package python
import (
"bufio"
"bytes"
"context"
"io"
"github.com/databricks/cli/libs/log"
)
type logWriter struct {
ctx context.Context
prefix string
buf bytes.Buffer
}
// newLogWriter creates a new io.Writer that writes to log with specified prefix.
func newLogWriter(ctx context.Context, prefix string) io.Writer {
return &logWriter{
ctx: ctx,
prefix: prefix,
}
}
func (p *logWriter) Write(bytes []byte) (n int, err error) {
p.buf.Write(bytes)
scanner := bufio.NewScanner(&p.buf)
for scanner.Scan() {
line := scanner.Text()
log.Debugf(p.ctx, "%s%s", p.prefix, line)
}
remaining := p.buf.Bytes()
p.buf.Reset()
p.buf.Write(remaining)
return len(bytes), nil
}


@@ -0,0 +1,97 @@
package python
import (
"encoding/json"
"fmt"
"io"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
)
type pythonDiagnostic struct {
Severity pythonSeverity `json:"severity"`
Summary string `json:"summary"`
Detail string `json:"detail,omitempty"`
Location pythonDiagnosticLocation `json:"location,omitempty"`
Path string `json:"path,omitempty"`
}
type pythonDiagnosticLocation struct {
File string `json:"file"`
Line int `json:"line"`
Column int `json:"column"`
}
type pythonSeverity = string
const (
pythonError pythonSeverity = "error"
pythonWarning pythonSeverity = "warning"
)
// parsePythonDiagnostics parses diagnostics from the Python mutator.
//
// diagnostics file is newline-separated JSON objects with pythonDiagnostic structure.
func parsePythonDiagnostics(input io.Reader) (diag.Diagnostics, error) {
diags := diag.Diagnostics{}
decoder := json.NewDecoder(input)
for decoder.More() {
var parsedLine pythonDiagnostic
err := decoder.Decode(&parsedLine)
if err != nil {
return nil, fmt.Errorf("failed to parse diags: %s", err)
}
severity, err := convertPythonSeverity(parsedLine.Severity)
if err != nil {
return nil, fmt.Errorf("failed to parse severity: %s", err)
}
path, err := convertPythonPath(parsedLine.Path)
if err != nil {
return nil, fmt.Errorf("failed to parse path: %s", err)
}
diag := diag.Diagnostic{
Severity: severity,
Summary: parsedLine.Summary,
Detail: parsedLine.Detail,
Location: convertPythonLocation(parsedLine.Location),
Path: path,
}
diags = diags.Append(diag)
}
return diags, nil
}
func convertPythonPath(path string) (dyn.Path, error) {
if path == "" {
return nil, nil
}
return dyn.NewPathFromString(path)
}
func convertPythonSeverity(severity pythonSeverity) (diag.Severity, error) {
switch severity {
case pythonError:
return diag.Error, nil
case pythonWarning:
return diag.Warning, nil
default:
return 0, fmt.Errorf("unexpected value: %s", severity)
}
}
func convertPythonLocation(location pythonDiagnosticLocation) dyn.Location {
return dyn.Location{
File: location.File,
Line: location.Line,
Column: location.Column,
}
}
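
For reference, the diagnostics file consumed by loadDiagnosticsFile and parsePythonDiagnostics is newline-delimited JSON, one object per diagnostic; the sample lines below are taken from the test fixtures that follow:

{"severity": "warning", "summary": "job doesn't have any tasks", "location": {"file": "src/examples/file.py", "line": 10, "column": 5}}
{"severity": "error", "summary": "error summary", "path": "resources.jobs.job0.name"}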


@@ -0,0 +1,107 @@
package python
import (
"bytes"
"testing"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
assert "github.com/databricks/cli/libs/dyn/dynassert"
)
func TestConvertPythonLocation(t *testing.T) {
location := convertPythonLocation(pythonDiagnosticLocation{
File: "src/examples/file.py",
Line: 1,
Column: 2,
})
assert.Equal(t, dyn.Location{
File: "src/examples/file.py",
Line: 1,
Column: 2,
}, location)
}
type parsePythonDiagnosticsTest struct {
name string
input string
expected diag.Diagnostics
}
func TestParsePythonDiagnostics(t *testing.T) {
testCases := []parsePythonDiagnosticsTest{
{
name: "short error with location",
input: `{"severity": "error", "summary": "error summary", "location": {"file": "src/examples/file.py", "line": 1, "column": 2}}`,
expected: diag.Diagnostics{
{
Severity: diag.Error,
Summary: "error summary",
Location: dyn.Location{
File: "src/examples/file.py",
Line: 1,
Column: 2,
},
},
},
},
{
name: "short error with path",
input: `{"severity": "error", "summary": "error summary", "path": "resources.jobs.job0.name"}`,
expected: diag.Diagnostics{
{
Severity: diag.Error,
Summary: "error summary",
Path: dyn.MustPathFromString("resources.jobs.job0.name"),
},
},
},
{
name: "empty file",
input: "",
expected: diag.Diagnostics{},
},
{
name: "newline file",
input: "\n",
expected: diag.Diagnostics{},
},
{
name: "warning with detail",
input: `{"severity": "warning", "summary": "warning summary", "detail": "warning detail"}`,
expected: diag.Diagnostics{
{
Severity: diag.Warning,
Summary: "warning summary",
Detail: "warning detail",
},
},
},
{
name: "multiple errors",
input: `{"severity": "error", "summary": "error summary (1)"}` + "\n" +
`{"severity": "error", "summary": "error summary (2)"}`,
expected: diag.Diagnostics{
{
Severity: diag.Error,
Summary: "error summary (1)",
},
{
Severity: diag.Error,
Summary: "error summary (2)",
},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
diags, err := parsePythonDiagnostics(bytes.NewReader([]byte(tc.input)))
assert.NoError(t, err)
assert.Equal(t, tc.expected, diags)
})
}
}


@@ -0,0 +1,433 @@
package python
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"runtime"
"github.com/databricks/databricks-sdk-go/logger"
"github.com/databricks/cli/bundle/env"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/log"
"github.com/databricks/cli/libs/process"
)
type phase string
const (
// PythonMutatorPhaseLoad is the phase in which bundle configuration is loaded.
//
// At this stage, PyDABs adds statically defined resources to the bundle configuration.
// Which resources are added should be deterministic and not depend on the bundle configuration.
//
// We also open for possibility of appending other sections of bundle configuration,
// for example, adding new variables. However, this is not supported yet, and CLI rejects
// such changes.
PythonMutatorPhaseLoad phase = "load"
// PythonMutatorPhaseInit is the phase after bundle configuration was loaded, and
// the list of statically declared resources is known.
//
// At this stage, PyDABs adds resources defined using generators, or mutates existing resources,
// including the ones defined using YAML.
//
// During this process, within generator and mutators, PyDABs can access:
// - selected deployment target
// - bundle variables values
// - variables provided through CLI arguments or environment variables
//
// The following is not available:
// - variables referencing other variables are in unresolved format
//
// PyDABs can output YAML containing references to variables, and CLI should resolve them.
//
// Existing resources can't be removed, and CLI rejects such changes.
PythonMutatorPhaseInit phase = "init"
)
type pythonMutator struct {
phase phase
}
func PythonMutator(phase phase) bundle.Mutator {
return &pythonMutator{
phase: phase,
}
}
func (m *pythonMutator) Name() string {
return fmt.Sprintf("PythonMutator(%s)", m.phase)
}
func getExperimental(b *bundle.Bundle) config.Experimental {
if b.Config.Experimental == nil {
return config.Experimental{}
}
return *b.Config.Experimental
}
func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
experimental := getExperimental(b)
if !experimental.PyDABs.Enabled {
return nil
}
if experimental.PyDABs.VEnvPath == "" {
return diag.Errorf("\"experimental.pydabs.enabled\" can only be used when \"experimental.pydabs.venv_path\" is set")
}
// mutateDiags is used because Mutate returns 'error' instead of 'diag.Diagnostics'
var mutateDiags diag.Diagnostics
var mutateDiagsHasError = errors.New("unexpected error")
err := b.Config.Mutate(func(leftRoot dyn.Value) (dyn.Value, error) {
pythonPath := interpreterPath(experimental.PyDABs.VEnvPath)
if _, err := os.Stat(pythonPath); err != nil {
if os.IsNotExist(err) {
return dyn.InvalidValue, fmt.Errorf("can't find %q, check if venv is created", pythonPath)
} else {
return dyn.InvalidValue, fmt.Errorf("can't find %q: %w", pythonPath, err)
}
}
cacheDir, err := createCacheDir(ctx)
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to create cache dir: %w", err)
}
rightRoot, diags := m.runPythonMutator(ctx, cacheDir, b.RootPath, pythonPath, leftRoot)
mutateDiags = diags
if diags.HasError() {
return dyn.InvalidValue, mutateDiagsHasError
}
visitor, err := createOverrideVisitor(ctx, m.phase)
if err != nil {
return dyn.InvalidValue, err
}
return merge.Override(leftRoot, rightRoot, visitor)
})
if err == mutateDiagsHasError {
if !mutateDiags.HasError() {
panic("mutateDiags has no error, but error is expected")
}
return mutateDiags
}
return mutateDiags.Extend(diag.FromErr(err))
}
func createCacheDir(ctx context.Context) (string, error) {
// b.CacheDir doesn't work because target isn't yet selected
// support the same env variable as in b.CacheDir
if tempDir, exists := env.TempDir(ctx); exists {
// use 'default' as target name
cacheDir := filepath.Join(tempDir, "default", "pydabs")
err := os.MkdirAll(cacheDir, 0700)
if err != nil {
return "", err
}
return cacheDir, nil
}
return os.MkdirTemp("", "-pydabs")
}
func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, rootPath string, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) {
inputPath := filepath.Join(cacheDir, "input.json")
outputPath := filepath.Join(cacheDir, "output.json")
diagnosticsPath := filepath.Join(cacheDir, "diagnostics.json")
args := []string{
pythonPath,
"-m",
"databricks.bundles.build",
"--phase",
string(m.phase),
"--input",
inputPath,
"--output",
outputPath,
"--diagnostics",
diagnosticsPath,
}
if err := writeInputFile(inputPath, root); err != nil {
return dyn.InvalidValue, diag.Errorf("failed to write input file: %s", err)
}
stderrWriter := newLogWriter(ctx, "stderr: ")
stdoutWriter := newLogWriter(ctx, "stdout: ")
_, processErr := process.Background(
ctx,
args,
process.WithDir(rootPath),
process.WithStderrWriter(stderrWriter),
process.WithStdoutWriter(stdoutWriter),
)
if processErr != nil {
logger.Debugf(ctx, "python mutator process failed: %s", processErr)
}
pythonDiagnostics, pythonDiagnosticsErr := loadDiagnosticsFile(diagnosticsPath)
if pythonDiagnosticsErr != nil {
logger.Debugf(ctx, "failed to load diagnostics: %s", pythonDiagnosticsErr)
}
// if diagnostics file exists, it gives the most descriptive errors
// if there is any error, we treat it as fatal error, and stop processing
if pythonDiagnostics.HasError() {
return dyn.InvalidValue, pythonDiagnostics
}
// process can fail without reporting errors in diagnostics file or creating it, for instance,
// venv doesn't have PyDABs library installed
if processErr != nil {
return dyn.InvalidValue, diag.Errorf("python mutator process failed: %sw, use --debug to enable logging", processErr)
}
// or we can fail to read diagnostics file, that should always be created
if pythonDiagnosticsErr != nil {
return dyn.InvalidValue, diag.Errorf("failed to load diagnostics: %s", pythonDiagnosticsErr)
}
output, err := loadOutputFile(rootPath, outputPath)
if err != nil {
return dyn.InvalidValue, diag.Errorf("failed to load Python mutator output: %s", err)
}
// we pass through pythonDiagnostic because it contains warnings
return output, pythonDiagnostics
}
func writeInputFile(inputPath string, input dyn.Value) error {
// we need to marshal dyn.Value instead of bundle.Config to JSON to support
// non-string fields assigned with bundle variables
rootConfigJson, err := json.Marshal(input.AsAny())
if err != nil {
return fmt.Errorf("failed to marshal input: %w", err)
}
return os.WriteFile(inputPath, rootConfigJson, 0600)
}
func loadOutputFile(rootPath string, outputPath string) (dyn.Value, error) {
outputFile, err := os.Open(outputPath)
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to open output file: %w", err)
}
defer outputFile.Close()
// we need absolute path because later parts of pipeline assume all paths are absolute
// and this file will be used as location to resolve relative paths.
//
// virtualPath has to stay in rootPath, because locations outside root path are not allowed:
//
// Error: path /var/folders/.../pydabs/dist/*.whl is not contained in bundle root path
//
// for that, we pass virtualPath instead of outputPath as file location
virtualPath, err := filepath.Abs(filepath.Join(rootPath, "__generated_by_pydabs__.yml"))
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to get absolute path: %w", err)
}
generated, err := yamlloader.LoadYAML(virtualPath, outputFile)
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to parse output file: %w", err)
}
normalized, diagnostic := convert.Normalize(config.Root{}, generated)
if diagnostic.Error() != nil {
return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %w", diagnostic.Error())
}
// warnings shouldn't happen because output should be already normalized
// when it happens, it's a bug in the mutator, and should be treated as an error
for _, d := range diagnostic.Filter(diag.Warning) {
return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %s", d.Summary)
}
return normalized, nil
}
// loadDiagnosticsFile loads diagnostics from a file.
//
// It contains a list of warnings and errors that we should print to users.
//
// If the file doesn't exist, we return an error. We expect the file to always be
// created by the Python mutator, and it's absence means there are integration problems,
// and the diagnostics file was lost. If we treat non-existence as an empty diag.Diagnostics
// we risk losing errors and warnings.
func loadDiagnosticsFile(path string) (diag.Diagnostics, error) {
file, err := os.Open(path)
if err != nil {
return nil, fmt.Errorf("failed to open diagnostics file: %w", err)
}
defer file.Close()
return parsePythonDiagnostics(file)
}
func createOverrideVisitor(ctx context.Context, phase phase) (merge.OverrideVisitor, error) {
switch phase {
case PythonMutatorPhaseLoad:
return createLoadOverrideVisitor(ctx), nil
case PythonMutatorPhaseInit:
return createInitOverrideVisitor(ctx), nil
default:
return merge.OverrideVisitor{}, fmt.Errorf("unknown phase: %s", phase)
}
}
// createLoadOverrideVisitor creates an override visitor for the load phase.
//
// During load, it's only possible to create new resources, and not modify or
// delete existing ones.
func createLoadOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
resourcesPath := dyn.NewPath(dyn.Key("resources"))
jobsPath := dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs"))
return merge.OverrideVisitor{
VisitDelete: func(valuePath dyn.Path, left dyn.Value) error {
if isOmitemptyDelete(left) {
return merge.ErrOverrideUndoDelete
}
return fmt.Errorf("unexpected change at %q (delete)", valuePath.String())
},
VisitInsert: func(valuePath dyn.Path, right dyn.Value) (dyn.Value, error) {
// insert 'resources' or 'resources.jobs' if it didn't exist before
if valuePath.Equal(resourcesPath) || valuePath.Equal(jobsPath) {
return right, nil
}
if !valuePath.HasPrefix(jobsPath) {
return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (insert)", valuePath.String())
}
insertResource := len(valuePath) == len(jobsPath)+1
// adding a property into an existing resource is not allowed, because it changes it
if !insertResource {
return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (insert)", valuePath.String())
}
log.Debugf(ctx, "Insert value at %q", valuePath.String())
return right, nil
},
VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) {
return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (update)", valuePath.String())
},
}
}
// createInitOverrideVisitor creates an override visitor for the init phase.
//
// During the init phase it's possible to create new resources, modify existing
// resources, but not delete existing resources.
func createInitOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
resourcesPath := dyn.NewPath(dyn.Key("resources"))
jobsPath := dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs"))
return merge.OverrideVisitor{
VisitDelete: func(valuePath dyn.Path, left dyn.Value) error {
if isOmitemptyDelete(left) {
return merge.ErrOverrideUndoDelete
}
if !valuePath.HasPrefix(jobsPath) {
return fmt.Errorf("unexpected change at %q (delete)", valuePath.String())
}
deleteResource := len(valuePath) == len(jobsPath)+1
if deleteResource {
return fmt.Errorf("unexpected change at %q (delete)", valuePath.String())
}
// deleting properties is allowed because it only changes an existing resource
log.Debugf(ctx, "Delete value at %q", valuePath.String())
return nil
},
VisitInsert: func(valuePath dyn.Path, right dyn.Value) (dyn.Value, error) {
// insert 'resources' or 'resources.jobs' if it didn't exist before
if valuePath.Equal(resourcesPath) || valuePath.Equal(jobsPath) {
return right, nil
}
if !valuePath.HasPrefix(jobsPath) {
return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (insert)", valuePath.String())
}
log.Debugf(ctx, "Insert value at %q", valuePath.String())
return right, nil
},
VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) {
if !valuePath.HasPrefix(jobsPath) {
return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (update)", valuePath.String())
}
log.Debugf(ctx, "Update value at %q", valuePath.String())
return right, nil
},
}
}
func isOmitemptyDelete(left dyn.Value) bool {
// PyDABs can omit empty sequences/mappings in output, because we don't track them as optional,
// there is no semantic difference between empty and missing, so we keep them as they were before
// PyDABs deleted them.
switch left.Kind() {
case dyn.KindMap:
return left.MustMap().Len() == 0
case dyn.KindSequence:
return len(left.MustSequence()) == 0
case dyn.KindNil:
// map/sequence can be nil, for instance, bad YAML like: `foo:<eof>`
return true
default:
return false
}
}
// interpreterPath returns platform-specific path to Python interpreter in the virtual environment.
func interpreterPath(venvPath string) string {
if runtime.GOOS == "windows" {
return filepath.Join(venvPath, "Scripts", "python3.exe")
} else {
return filepath.Join(venvPath, "bin", "python3")
}
}


@@ -0,0 +1,625 @@
package python
import (
"context"
"fmt"
"os"
"os/exec"
"path/filepath"
"reflect"
"runtime"
"testing"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/bundle/env"
"github.com/stretchr/testify/require"
"golang.org/x/exp/maps"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
assert "github.com/databricks/cli/libs/dyn/dynassert"
"github.com/databricks/cli/libs/process"
)
func TestPythonMutator_Name_load(t *testing.T) {
mutator := PythonMutator(PythonMutatorPhaseLoad)
assert.Equal(t, "PythonMutator(load)", mutator.Name())
}
func TestPythonMutator_Name_init(t *testing.T) {
mutator := PythonMutator(PythonMutatorPhaseInit)
assert.Equal(t, "PythonMutator(init)", mutator.Name())
}
func TestPythonMutator_load(t *testing.T) {
withFakeVEnv(t, ".venv")
b := loadYaml("databricks.yml", `
experimental:
pydabs:
enabled: true
venv_path: .venv
resources:
jobs:
job0:
name: job_0`)
ctx := withProcessStub(
t,
[]string{
interpreterPath(".venv"),
"-m",
"databricks.bundles.build",
"--phase",
"load",
},
`{
"experimental": {
"pydabs": {
"enabled": true,
"venv_path": ".venv"
}
},
"resources": {
"jobs": {
"job0": {
name: "job_0"
},
"job1": {
name: "job_1"
},
}
}
}`,
`{"severity": "warning", "summary": "job doesn't have any tasks", "location": {"file": "src/examples/file.py", "line": 10, "column": 5}}`,
)
mutator := PythonMutator(PythonMutatorPhaseLoad)
diags := bundle.Apply(ctx, b, mutator)
assert.NoError(t, diags.Error())
assert.ElementsMatch(t, []string{"job0", "job1"}, maps.Keys(b.Config.Resources.Jobs))
if job0, ok := b.Config.Resources.Jobs["job0"]; ok {
assert.Equal(t, "job_0", job0.Name)
}
if job1, ok := b.Config.Resources.Jobs["job1"]; ok {
assert.Equal(t, "job_1", job1.Name)
}
assert.Equal(t, 1, len(diags))
assert.Equal(t, "job doesn't have any tasks", diags[0].Summary)
assert.Equal(t, dyn.Location{
File: "src/examples/file.py",
Line: 10,
Column: 5,
}, diags[0].Location)
}
func TestPythonMutator_load_disallowed(t *testing.T) {
withFakeVEnv(t, ".venv")
b := loadYaml("databricks.yml", `
experimental:
pydabs:
enabled: true
venv_path: .venv
resources:
jobs:
job0:
name: job_0`)
ctx := withProcessStub(
t,
[]string{
interpreterPath(".venv"),
"-m",
"databricks.bundles.build",
"--phase",
"load",
},
`{
"experimental": {
"pydabs": {
"enabled": true,
"venv_path": ".venv"
}
},
"resources": {
"jobs": {
"job0": {
name: "job_0",
description: "job description"
}
}
}
}`, "")
mutator := PythonMutator(PythonMutatorPhaseLoad)
diag := bundle.Apply(ctx, b, mutator)
assert.EqualError(t, diag.Error(), "unexpected change at \"resources.jobs.job0.description\" (insert)")
}
func TestPythonMutator_init(t *testing.T) {
withFakeVEnv(t, ".venv")
b := loadYaml("databricks.yml", `
experimental:
pydabs:
enabled: true
venv_path: .venv
resources:
jobs:
job0:
name: job_0`)
ctx := withProcessStub(
t,
[]string{
interpreterPath(".venv"),
"-m",
"databricks.bundles.build",
"--phase",
"init",
},
`{
"experimental": {
"pydabs": {
"enabled": true,
"venv_path": ".venv"
}
},
"resources": {
"jobs": {
"job0": {
name: "job_0",
description: "my job"
}
}
}
}`, "")
mutator := PythonMutator(PythonMutatorPhaseInit)
diag := bundle.Apply(ctx, b, mutator)
assert.NoError(t, diag.Error())
assert.ElementsMatch(t, []string{"job0"}, maps.Keys(b.Config.Resources.Jobs))
assert.Equal(t, "job_0", b.Config.Resources.Jobs["job0"].Name)
assert.Equal(t, "my job", b.Config.Resources.Jobs["job0"].Description)
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
// 'name' wasn't changed, so it keeps its location
name, err := dyn.GetByPath(v, dyn.MustPathFromString("resources.jobs.job0.name"))
require.NoError(t, err)
assert.Equal(t, "databricks.yml", name.Location().File)
// 'description' was updated by PyDABs and has location of generated file until
// we implement source maps
description, err := dyn.GetByPath(v, dyn.MustPathFromString("resources.jobs.job0.description"))
require.NoError(t, err)
expectedVirtualPath, err := filepath.Abs("__generated_by_pydabs__.yml")
require.NoError(t, err)
assert.Equal(t, expectedVirtualPath, description.Location().File)
return v, nil
})
assert.NoError(t, err)
}
func TestPythonMutator_badOutput(t *testing.T) {
withFakeVEnv(t, ".venv")
b := loadYaml("databricks.yml", `
experimental:
pydabs:
enabled: true
venv_path: .venv
resources:
jobs:
job0:
name: job_0`)
ctx := withProcessStub(
t,
[]string{
interpreterPath(".venv"),
"-m",
"databricks.bundles.build",
"--phase",
"load",
},
`{
"resources": {
"jobs": {
"job0": {
unknown_property: "my job"
}
}
}
}`, "")
mutator := PythonMutator(PythonMutatorPhaseLoad)
diag := bundle.Apply(ctx, b, mutator)
assert.EqualError(t, diag.Error(), "failed to load Python mutator output: failed to normalize output: unknown field: unknown_property")
}
func TestPythonMutator_disabled(t *testing.T) {
b := loadYaml("databricks.yml", ``)
ctx := context.Background()
mutator := PythonMutator(PythonMutatorPhaseLoad)
diag := bundle.Apply(ctx, b, mutator)
assert.NoError(t, diag.Error())
}
func TestPythonMutator_venvRequired(t *testing.T) {
b := loadYaml("databricks.yml", `
experimental:
pydabs:
enabled: true`)
ctx := context.Background()
mutator := PythonMutator(PythonMutatorPhaseLoad)
diag := bundle.Apply(ctx, b, mutator)
assert.Error(t, diag.Error(), "\"experimental.enable_pydabs\" is enabled, but \"experimental.venv.path\" is not set")
}
func TestPythonMutator_venvNotFound(t *testing.T) {
expectedError := fmt.Sprintf("can't find %q, check if venv is created", interpreterPath("bad_path"))
b := loadYaml("databricks.yml", `
experimental:
pydabs:
enabled: true
venv_path: bad_path`)
mutator := PythonMutator(PythonMutatorPhaseInit)
diag := bundle.Apply(context.Background(), b, mutator)
assert.EqualError(t, diag.Error(), expectedError)
}
type createOverrideVisitorTestCase struct {
name string
updatePath dyn.Path
deletePath dyn.Path
insertPath dyn.Path
phase phase
updateError error
deleteError error
insertError error
}
func TestCreateOverrideVisitor(t *testing.T) {
left := dyn.NewValue(42, dyn.Location{})
right := dyn.NewValue(1337, dyn.Location{})
testCases := []createOverrideVisitorTestCase{
{
name: "load: can't change an existing job",
phase: PythonMutatorPhaseLoad,
updatePath: dyn.MustPathFromString("resources.jobs.job0.name"),
deletePath: dyn.MustPathFromString("resources.jobs.job0.name"),
insertPath: dyn.MustPathFromString("resources.jobs.job0.name"),
deleteError: fmt.Errorf("unexpected change at \"resources.jobs.job0.name\" (delete)"),
insertError: fmt.Errorf("unexpected change at \"resources.jobs.job0.name\" (insert)"),
updateError: fmt.Errorf("unexpected change at \"resources.jobs.job0.name\" (update)"),
},
{
name: "load: can't delete an existing job",
phase: PythonMutatorPhaseLoad,
deletePath: dyn.MustPathFromString("resources.jobs.job0"),
deleteError: fmt.Errorf("unexpected change at \"resources.jobs.job0\" (delete)"),
},
{
name: "load: can insert 'resources'",
phase: PythonMutatorPhaseLoad,
insertPath: dyn.MustPathFromString("resources"),
insertError: nil,
},
{
name: "load: can insert 'resources.jobs'",
phase: PythonMutatorPhaseLoad,
insertPath: dyn.MustPathFromString("resources.jobs"),
insertError: nil,
},
{
name: "load: can insert a job",
phase: PythonMutatorPhaseLoad,
insertPath: dyn.MustPathFromString("resources.jobs.job0"),
insertError: nil,
},
{
name: "load: can't change include",
phase: PythonMutatorPhaseLoad,
deletePath: dyn.MustPathFromString("include[0]"),
insertPath: dyn.MustPathFromString("include[0]"),
updatePath: dyn.MustPathFromString("include[0]"),
deleteError: fmt.Errorf("unexpected change at \"include[0]\" (delete)"),
insertError: fmt.Errorf("unexpected change at \"include[0]\" (insert)"),
updateError: fmt.Errorf("unexpected change at \"include[0]\" (update)"),
},
{
name: "init: can change an existing job",
phase: PythonMutatorPhaseInit,
updatePath: dyn.MustPathFromString("resources.jobs.job0.name"),
deletePath: dyn.MustPathFromString("resources.jobs.job0.name"),
insertPath: dyn.MustPathFromString("resources.jobs.job0.name"),
deleteError: nil,
insertError: nil,
updateError: nil,
},
{
name: "init: can't delete an existing job",
phase: PythonMutatorPhaseInit,
deletePath: dyn.MustPathFromString("resources.jobs.job0"),
deleteError: fmt.Errorf("unexpected change at \"resources.jobs.job0\" (delete)"),
},
{
name: "init: can insert 'resources'",
phase: PythonMutatorPhaseInit,
insertPath: dyn.MustPathFromString("resources"),
insertError: nil,
},
{
name: "init: can insert 'resources.jobs'",
phase: PythonMutatorPhaseInit,
insertPath: dyn.MustPathFromString("resources.jobs"),
insertError: nil,
},
{
name: "init: can insert a job",
phase: PythonMutatorPhaseInit,
insertPath: dyn.MustPathFromString("resources.jobs.job0"),
insertError: nil,
},
{
name: "init: can't change include",
phase: PythonMutatorPhaseInit,
deletePath: dyn.MustPathFromString("include[0]"),
insertPath: dyn.MustPathFromString("include[0]"),
updatePath: dyn.MustPathFromString("include[0]"),
deleteError: fmt.Errorf("unexpected change at \"include[0]\" (delete)"),
insertError: fmt.Errorf("unexpected change at \"include[0]\" (insert)"),
updateError: fmt.Errorf("unexpected change at \"include[0]\" (update)"),
},
}
for _, tc := range testCases {
visitor, err := createOverrideVisitor(context.Background(), tc.phase)
if err != nil {
t.Fatalf("create visitor failed: %v", err)
}
if tc.updatePath != nil {
t.Run(tc.name+"-update", func(t *testing.T) {
out, err := visitor.VisitUpdate(tc.updatePath, left, right)
if tc.updateError != nil {
assert.Equal(t, tc.updateError, err)
} else {
assert.NoError(t, err)
assert.Equal(t, right, out)
}
})
}
if tc.deletePath != nil {
t.Run(tc.name+"-delete", func(t *testing.T) {
err := visitor.VisitDelete(tc.deletePath, left)
if tc.deleteError != nil {
assert.Equal(t, tc.deleteError, err)
} else {
assert.NoError(t, err)
}
})
}
if tc.insertPath != nil {
t.Run(tc.name+"-insert", func(t *testing.T) {
out, err := visitor.VisitInsert(tc.insertPath, right)
if tc.insertError != nil {
assert.Equal(t, tc.insertError, err)
} else {
assert.NoError(t, err)
assert.Equal(t, right, out)
}
})
}
}
}
type overrideVisitorOmitemptyTestCase struct {
name string
path dyn.Path
left dyn.Value
phases []phase
expectedErr error
}
func TestCreateOverrideVisitor_omitempty(t *testing.T) {
// PyDABs can omit empty sequences/mappings in its output because we don't track them as optional;
// there is no semantic difference between empty and missing, so we keep them as they were before
// PyDABs deleted them.
allPhases := []phase{PythonMutatorPhaseLoad, PythonMutatorPhaseInit}
location := dyn.Location{
File: "databricks.yml",
Line: 10,
Column: 20,
}
testCases := []overrideVisitorOmitemptyTestCase{
{
// this is not happening, but adding for completeness
name: "undo delete of empty variables",
path: dyn.MustPathFromString("variables"),
left: dyn.NewValue([]dyn.Value{}, location),
expectedErr: merge.ErrOverrideUndoDelete,
phases: allPhases,
},
{
name: "undo delete of empty job clusters",
path: dyn.MustPathFromString("resources.jobs.job0.job_clusters"),
left: dyn.NewValue([]dyn.Value{}, location),
expectedErr: merge.ErrOverrideUndoDelete,
phases: allPhases,
},
{
name: "allow delete of non-empty job clusters",
path: dyn.MustPathFromString("resources.jobs.job0.job_clusters"),
left: dyn.NewValue([]dyn.Value{dyn.NewValue("abc", location)}, location),
expectedErr: nil,
// deletions aren't allowed in 'load' phase
phases: []phase{PythonMutatorPhaseInit},
},
{
name: "undo delete of empty tags",
path: dyn.MustPathFromString("resources.jobs.job0.tags"),
left: dyn.NewValue(map[string]dyn.Value{}, location),
expectedErr: merge.ErrOverrideUndoDelete,
phases: allPhases,
},
{
name: "allow delete of non-empty tags",
path: dyn.MustPathFromString("resources.jobs.job0.tags"),
left: dyn.NewValue(
map[string]dyn.Value{"dev": dyn.NewValue("true", location)},
location,
),
expectedErr: nil,
// deletions aren't allowed in 'load' phase
phases: []phase{PythonMutatorPhaseInit},
},
{
name: "undo delete of nil",
path: dyn.MustPathFromString("resources.jobs.job0.tags"),
left: dyn.NilValue.WithLocation(location),
expectedErr: merge.ErrOverrideUndoDelete,
phases: allPhases,
},
}
for _, tc := range testCases {
for _, phase := range tc.phases {
t.Run(tc.name+"-"+string(phase), func(t *testing.T) {
visitor, err := createOverrideVisitor(context.Background(), phase)
require.NoError(t, err)
err = visitor.VisitDelete(tc.path, tc.left)
assert.Equal(t, tc.expectedErr, err)
})
}
}
}
func TestLoadDiagnosticsFile_nonExistent(t *testing.T) {
// this is an important behaviour, see loadDiagnosticsFile docstring
_, err := loadDiagnosticsFile("non_existent_file.json")
assert.Error(t, err)
}
func TestInterpreterPath(t *testing.T) {
if runtime.GOOS == "windows" {
assert.Equal(t, "venv\\Scripts\\python3.exe", interpreterPath("venv"))
} else {
assert.Equal(t, "venv/bin/python3", interpreterPath("venv"))
}
}
func withProcessStub(t *testing.T, args []string, output string, diagnostics string) context.Context {
ctx := context.Background()
ctx, stub := process.WithStub(ctx)
t.Setenv(env.TempDirVariable, t.TempDir())
// after we override env variable, we always get the same cache dir as mutator
cacheDir, err := createCacheDir(ctx)
require.NoError(t, err)
inputPath := filepath.Join(cacheDir, "input.json")
outputPath := filepath.Join(cacheDir, "output.json")
diagnosticsPath := filepath.Join(cacheDir, "diagnostics.json")
args = append(args, "--input", inputPath)
args = append(args, "--output", outputPath)
args = append(args, "--diagnostics", diagnosticsPath)
stub.WithCallback(func(actual *exec.Cmd) error {
_, err := os.Stat(inputPath)
assert.NoError(t, err)
if reflect.DeepEqual(actual.Args, args) {
err := os.WriteFile(outputPath, []byte(output), 0600)
require.NoError(t, err)
err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0600)
require.NoError(t, err)
return nil
} else {
return fmt.Errorf("unexpected command: %v", actual.Args)
}
})
return ctx
}
func loadYaml(name string, content string) *bundle.Bundle {
v, diag := config.LoadFromBytes(name, []byte(content))
if diag.Error() != nil {
panic(diag.Error())
}
return &bundle.Bundle{
Config: *v,
}
}
func withFakeVEnv(t *testing.T, path string) {
interpreterPath := interpreterPath(path)
cwd, err := os.Getwd()
if err != nil {
panic(err)
}
if err := os.Chdir(t.TempDir()); err != nil {
panic(err)
}
err = os.MkdirAll(filepath.Dir(interpreterPath), 0755)
if err != nil {
panic(err)
}
err = os.WriteFile(interpreterPath, []byte(""), 0755)
if err != nil {
panic(err)
}
t.Cleanup(func() {
if err := os.Chdir(cwd); err != nil {
panic(err)
}
})
}
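Since the Python process is stubbed and the virtual environment is faked, these tests run without PyDABs installed; with standard Go tooling that is simply `go test` for this package (the package path, likely bundle/config/mutator/python, is inferred from the package name above rather than stated in this diff).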

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/cli/libs/env"
"github.com/stretchr/testify/mock" "github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -34,7 +35,7 @@ func TestResolveClusterReference(t *testing.T) {
}, },
}, },
"some-variable": { "some-variable": {
Value: &justString, Value: justString,
}, },
}, },
}, },
@ -52,8 +53,8 @@ func TestResolveClusterReference(t *testing.T) {
diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, "1234-5678-abcd", *b.Config.Variables["my-cluster-id-1"].Value) require.Equal(t, "1234-5678-abcd", b.Config.Variables["my-cluster-id-1"].Value)
require.Equal(t, "9876-5432-xywz", *b.Config.Variables["my-cluster-id-2"].Value) require.Equal(t, "9876-5432-xywz", b.Config.Variables["my-cluster-id-2"].Value)
} }
func TestResolveNonExistentClusterReference(t *testing.T) { func TestResolveNonExistentClusterReference(t *testing.T) {
@ -68,7 +69,7 @@ func TestResolveNonExistentClusterReference(t *testing.T) {
}, },
}, },
"some-variable": { "some-variable": {
Value: &justString, Value: justString,
}, },
}, },
}, },
@ -104,7 +105,7 @@ func TestNoLookupIfVariableIsSet(t *testing.T) {
diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, "random value", *b.Config.Variables["my-cluster-id"].Value) require.Equal(t, "random value", b.Config.Variables["my-cluster-id"].Value)
} }
func TestResolveServicePrincipal(t *testing.T) { func TestResolveServicePrincipal(t *testing.T) {
@ -131,14 +132,11 @@ func TestResolveServicePrincipal(t *testing.T) {
diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, "app-1234", *b.Config.Variables["my-sp"].Value) require.Equal(t, "app-1234", b.Config.Variables["my-sp"].Value)
} }
func TestResolveVariableReferencesInVariableLookups(t *testing.T) { func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
s := func(s string) *string { s := "bar"
return &s
}
b := &bundle.Bundle{ b := &bundle.Bundle{
Config: config.Root{ Config: config.Root{
Bundle: config.Bundle{ Bundle: config.Bundle{
@ -146,7 +144,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
}, },
Variables: map[string]*variable.Variable{ Variables: map[string]*variable.Variable{
"foo": { "foo": {
Value: s("bar"), Value: s,
}, },
"lookup": { "lookup": {
Lookup: &variable.Lookup{ Lookup: &variable.Lookup{
@ -167,7 +165,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences())) diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster) require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster)
require.Equal(t, "1234-5678-abcd", *b.Config.Variables["lookup"].Value) require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
} }
func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) { func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) {
@ -194,3 +192,30 @@ func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) {
diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences())) diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
require.ErrorContains(t, diags.Error(), "lookup variables cannot contain references to another lookup variables") require.ErrorContains(t, diags.Error(), "lookup variables cannot contain references to another lookup variables")
} }
func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Target: "dev",
},
Variables: map[string]*variable.Variable{
"lookup": {
Lookup: &variable.Lookup{
Cluster: "cluster-${bundle.target}",
},
},
},
},
}
m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient)
ctx := context.Background()
ctx = env.Set(ctx, "BUNDLE_VAR_lookup", "1234-5678-abcd")
diags := bundle.Apply(ctx, b, bundle.Seq(SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
require.NoError(t, diags.Error())
require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
}

View File

@ -17,6 +17,7 @@ type resolveVariableReferences struct {
prefixes []string
pattern dyn.Pattern
lookupFn func(dyn.Value, dyn.Path) (dyn.Value, error)
+ skipFn func(dyn.Value) bool
}
func ResolveVariableReferences(prefixes ...string) bundle.Mutator {
@ -31,6 +32,18 @@ func ResolveVariableReferencesInLookup() bundle.Mutator {
}, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("lookup")), lookupFn: lookupForVariables}
}
func ResolveVariableReferencesInComplexVariables() bundle.Mutator {
return &resolveVariableReferences{prefixes: []string{
"bundle",
"workspace",
"variables",
},
pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("value")),
lookupFn: lookupForComplexVariables,
skipFn: skipResolvingInNonComplexVariables,
}
}
func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) {
// Future opportunity: if we lookup this path in both the given root
// and the synthesized root, we know if it was explicitly set or implied to be empty.
@ -38,6 +51,38 @@ func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) {
return dyn.GetByPath(v, path)
}
func lookupForComplexVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) {
if path[0].Key() != "variables" {
return lookup(v, path)
}
varV, err := dyn.GetByPath(v, path[:len(path)-1])
if err != nil {
return dyn.InvalidValue, err
}
var vv variable.Variable
err = convert.ToTyped(&vv, varV)
if err != nil {
return dyn.InvalidValue, err
}
if vv.Type == variable.VariableTypeComplex {
return dyn.InvalidValue, fmt.Errorf("complex variables cannot contain references to another complex variables")
}
return lookup(v, path)
}
func skipResolvingInNonComplexVariables(v dyn.Value) bool {
switch v.Kind() {
case dyn.KindMap, dyn.KindSequence:
return false
default:
return true
}
}
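Taken together with the skip function above, the intended ordering is to resolve references inside complex variable values before the general resolution pass; the tests added later in this commit chain the two mutators exactly like this:

// Mirrors the usage in the tests below: first resolve references inside
// complex variable values, then resolve all remaining references
// (including the complex variables themselves).
diags := bundle.Apply(ctx, b, bundle.Seq(
	ResolveVariableReferencesInComplexVariables(),
	ResolveVariableReferences("bundle", "workspace", "variables"),
))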
func lookupForVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) {
if path[0].Key() != "variables" {
return lookup(v, path)
@ -100,17 +145,27 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
// Resolve variable references in all values.
return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) {
// Rewrite the shorthand path ${var.foo} into ${variables.foo.value}.
- if path.HasPrefix(varPath) && len(path) == 2 {
- path = dyn.NewPath(
+ if path.HasPrefix(varPath) {
+ newPath := dyn.NewPath(
dyn.Key("variables"),
path[1],
dyn.Key("value"),
)
+ if len(path) > 2 {
+ newPath = newPath.Append(path[2:]...)
+ }
+ path = newPath
}
// Perform resolution only if the path starts with one of the specified prefixes.
for _, prefix := range prefixes {
if path.HasPrefix(prefix) {
+ // Skip resolution if there is a skip function and it returns true.
+ if m.skipFn != nil && m.skipFn(v) {
+ return dyn.InvalidValue, dynvar.ErrSkipResolution
+ }
return m.lookupFn(normalized, path)
}
}

View File

@ -43,10 +43,6 @@ func TestResolveVariableReferences(t *testing.T) {
}
func TestResolveVariableReferencesToBundleVariables(t *testing.T) {
- s := func(s string) *string {
- return &s
- }
b := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
@ -57,7 +53,7 @@ func TestResolveVariableReferencesToBundleVariables(t *testing.T) {
},
Variables: map[string]*variable.Variable{
"foo": {
- Value: s("bar"),
+ Value: "bar",
},
},
},
@ -195,3 +191,246 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) {
assert.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MaxWorkers)
assert.Equal(t, 0.5, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.AzureAttributes.SpotBidMaxPrice)
}
func TestResolveComplexVariable(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Name: "example",
},
Variables: map[string]*variable.Variable{
"cluster": {
Value: map[string]any{
"node_type_id": "Standard_DS3_v2",
"num_workers": 2,
},
Type: variable.VariableTypeComplex,
},
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {
JobSettings: &jobs.JobSettings{
JobClusters: []jobs.JobCluster{
{
NewCluster: compute.ClusterSpec{
NodeTypeId: "random",
},
},
},
},
},
},
},
},
}
ctx := context.Background()
// Assign the variables to the dynamic configuration.
diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
var p dyn.Path
var err error
p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0]")
v, err = dyn.SetByPath(v, p.Append(dyn.Key("new_cluster")), dyn.V("${var.cluster}"))
require.NoError(t, err)
return v, nil
})
return diag.FromErr(err)
})
require.NoError(t, diags.Error())
diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables"))
require.NoError(t, diags.Error())
require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId)
require.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NumWorkers)
}
func TestResolveComplexVariableReferencesToFields(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Name: "example",
},
Variables: map[string]*variable.Variable{
"cluster": {
Value: map[string]any{
"node_type_id": "Standard_DS3_v2",
"num_workers": 2,
},
Type: variable.VariableTypeComplex,
},
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {
JobSettings: &jobs.JobSettings{
JobClusters: []jobs.JobCluster{
{
NewCluster: compute.ClusterSpec{
NodeTypeId: "random",
},
},
},
},
},
},
},
},
}
ctx := context.Background()
// Assign the variables to the dynamic configuration.
diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
var p dyn.Path
var err error
p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0].new_cluster")
v, err = dyn.SetByPath(v, p.Append(dyn.Key("node_type_id")), dyn.V("${var.cluster.node_type_id}"))
require.NoError(t, err)
return v, nil
})
return diag.FromErr(err)
})
require.NoError(t, diags.Error())
diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables"))
require.NoError(t, diags.Error())
require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId)
}
func TestResolveComplexVariableReferencesWithComplexVariablesError(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Name: "example",
},
Variables: map[string]*variable.Variable{
"cluster": {
Value: map[string]any{
"node_type_id": "Standard_DS3_v2",
"num_workers": 2,
"spark_conf": "${var.spark_conf}",
},
Type: variable.VariableTypeComplex,
},
"spark_conf": {
Value: map[string]any{
"spark.executor.memory": "4g",
"spark.executor.cores": "2",
},
Type: variable.VariableTypeComplex,
},
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {
JobSettings: &jobs.JobSettings{
JobClusters: []jobs.JobCluster{
{
NewCluster: compute.ClusterSpec{
NodeTypeId: "random",
},
},
},
},
},
},
},
},
}
ctx := context.Background()
// Assign the variables to the dynamic configuration.
diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
var p dyn.Path
var err error
p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0]")
v, err = dyn.SetByPath(v, p.Append(dyn.Key("new_cluster")), dyn.V("${var.cluster}"))
require.NoError(t, err)
return v, nil
})
return diag.FromErr(err)
})
require.NoError(t, diags.Error())
diags = bundle.Apply(ctx, b, bundle.Seq(ResolveVariableReferencesInComplexVariables(), ResolveVariableReferences("bundle", "workspace", "variables")))
require.ErrorContains(t, diags.Error(), "complex variables cannot contain references to another complex variables")
}
func TestResolveComplexVariableWithVarReference(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Name: "example",
},
Variables: map[string]*variable.Variable{
"package_version": {
Value: "1.0.0",
},
"cluster_libraries": {
Value: [](map[string]any){
{
"pypi": map[string]string{
"package": "cicd_template==${var.package_version}",
},
},
},
Type: variable.VariableTypeComplex,
},
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {
JobSettings: &jobs.JobSettings{
Tasks: []jobs.Task{
{
Libraries: []compute.Library{},
},
},
},
},
},
},
},
}
ctx := context.Background()
// Assign the variables to the dynamic configuration.
diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
var p dyn.Path
var err error
p = dyn.MustPathFromString("resources.jobs.job1.tasks[0]")
v, err = dyn.SetByPath(v, p.Append(dyn.Key("libraries")), dyn.V("${var.cluster_libraries}"))
require.NoError(t, err)
return v, nil
})
return diag.FromErr(err)
})
require.NoError(t, diags.Error())
diags = bundle.Apply(ctx, b, bundle.Seq(
ResolveVariableReferencesInComplexVariables(),
ResolveVariableReferences("bundle", "workspace", "variables"),
))
require.NoError(t, diags.Error())
require.Equal(t, "cicd_template==1.0.0", b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].Libraries[0].Pypi.Package)
}

View File

@ -35,7 +35,7 @@ func (m *rewriteSyncPaths) makeRelativeTo(root string) dyn.MapFunc {
dir := filepath.Dir(v.Location().File)
rel, err := filepath.Rel(root, dir)
if err != nil {
- return dyn.NilValue, err
+ return dyn.InvalidValue, err
}
return dyn.NewValue(filepath.Join(rel, v.MustString()), v.Location()), nil
@ -47,11 +47,11 @@ func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) diag.Dia
return dyn.Map(v, "sync", func(_ dyn.Path, v dyn.Value) (nv dyn.Value, err error) {
v, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.RootPath)))
if err != nil {
- return dyn.NilValue, err
+ return dyn.InvalidValue, err
}
v, err = dyn.Map(v, "exclude", dyn.Foreach(m.makeRelativeTo(b.RootPath)))
if err != nil {
- return dyn.NilValue, err
+ return dyn.InvalidValue, err
}
return v, nil
})

View File

@ -42,14 +42,26 @@ func reportRunAsNotSupported(resourceType string, location dyn.Location, current
func validateRunAs(b *bundle.Bundle) diag.Diagnostics {
diags := diag.Diagnostics{}
- runAs := b.Config.RunAs
- // Error if neither service_principal_name nor user_name are specified
- if runAs.ServicePrincipalName == "" && runAs.UserName == "" {
- diags = diags.Extend(diag.Errorf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s", b.Config.GetLocation("run_as")))
+ neitherSpecifiedErr := diag.Diagnostics{{
+ Summary: "run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified",
+ Location: b.Config.GetLocation("run_as"),
+ Severity: diag.Error,
+ }}
+ // Fail fast if neither service_principal_name nor user_name are specified, but the
+ // run_as section is present.
+ if b.Config.Value().Get("run_as").Kind() == dyn.KindNil {
+ return neitherSpecifiedErr
+ }
+ // Fail fast if one or both of service_principal_name and user_name are specified,
+ // but with empty values.
+ runAs := b.Config.RunAs
+ if runAs.ServicePrincipalName == "" && runAs.UserName == "" {
+ return neitherSpecifiedErr
}
// Error if both service_principal_name and user_name are specified
if runAs.UserName != "" && runAs.ServicePrincipalName != "" {
diags = diags.Extend(diag.Diagnostics{{
Summary: "run_as section cannot specify both user_name and service_principal_name",
@ -151,8 +163,7 @@ func setPipelineOwnersToRunAsIdentity(b *bundle.Bundle) {
func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
// Mutator is a no-op if run_as is not specified in the bundle
- runAs := b.Config.RunAs
- if runAs == nil {
+ if b.Config.Value().Get("run_as").Kind() == dyn.KindInvalid {
return nil
}

View File

@ -18,7 +18,7 @@ import (
func allResourceTypes(t *testing.T) []string {
// Compute supported resource types based on the `Resources{}` struct.
- r := config.Resources{}
+ r := &config.Resources{}
rv, err := convert.FromTyped(r, dyn.NilValue)
require.NoError(t, err)
normalized, _ := convert.Normalize(r, rv, convert.IncludeMissingFields)
@ -154,6 +154,11 @@ func TestRunAsErrorForUnsupportedResources(t *testing.T) {
v, err := convert.FromTyped(base, dyn.NilValue)
require.NoError(t, err)
+ // Define top level resources key in the bundle configuration.
+ // This is not part of the typed configuration, so we need to add it manually.
+ v, err = dyn.Set(v, "resources", dyn.V(map[string]dyn.Value{}))
+ require.NoError(t, err)
for _, rt := range allResourceTypes(t) {
// Skip allowed resources
if slices.Contains(allowList, rt) {

View File

@ -30,6 +30,10 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Di
// case: read and set variable value from process environment
envVarName := bundleVarPrefix + name
if val, ok := env.Lookup(ctx, envVarName); ok {
+ if v.IsComplex() {
+ return diag.Errorf(`setting via environment variables (%s) is not supported for complex variable %s`, envVarName, name)
+ }
err := v.Set(val)
if err != nil {
return diag.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %v`, val, name, envVarName, err)
@ -37,21 +41,21 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Di
return nil
}
- // case: Set the variable to its default value
- if v.HasDefault() {
- err := v.Set(*v.Default)
- if err != nil {
- return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, *v.Default, name, err)
- }
- return nil
- }
// case: Defined a variable for named lookup for a resource
// It will be resolved later in ResolveResourceReferences mutator
if v.Lookup != nil {
return nil
}
+ // case: Set the variable to its default value
+ if v.HasDefault() {
+ err := v.Set(v.Default)
+ if err != nil {
+ return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, v.Default, name, err)
+ }
+ return nil
+ }
// We should have had a value to set for the variable at this point.
return diag.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
}

View File

@ -15,7 +15,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {
defaultVal := "default"
variable := variable.Variable{
Description: "a test variable",
- Default: &defaultVal,
+ Default: defaultVal,
}
// set value for variable as an environment variable
@ -23,19 +23,19 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {
diags := setVariable(context.Background(), &variable, "foo")
require.NoError(t, diags.Error())
- assert.Equal(t, *variable.Value, "process-env")
+ assert.Equal(t, variable.Value, "process-env")
}
func TestSetVariableUsingDefaultValue(t *testing.T) {
defaultVal := "default"
variable := variable.Variable{
Description: "a test variable",
- Default: &defaultVal,
+ Default: defaultVal,
}
diags := setVariable(context.Background(), &variable, "foo")
require.NoError(t, diags.Error())
- assert.Equal(t, *variable.Value, "default")
+ assert.Equal(t, variable.Value, "default")
}
func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
@ -43,15 +43,15 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
val := "assigned-value"
variable := variable.Variable{
Description: "a test variable",
- Default: &defaultVal,
- Value: &val,
+ Default: defaultVal,
+ Value: val,
}
// since a value is already assigned to the variable, it would not be overridden
// by the default value
diags := setVariable(context.Background(), &variable, "foo")
require.NoError(t, diags.Error())
- assert.Equal(t, *variable.Value, "assigned-value")
+ assert.Equal(t, variable.Value, "assigned-value")
}
func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
@ -59,8 +59,8 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
val := "assigned-value"
variable := variable.Variable{
Description: "a test variable",
- Default: &defaultVal,
- Value: &val,
+ Default: defaultVal,
+ Value: val,
}
// set value for variable as an environment variable
@ -70,7 +70,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
// by the value from environment
diags := setVariable(context.Background(), &variable, "foo")
require.NoError(t, diags.Error())
- assert.Equal(t, *variable.Value, "assigned-value")
+ assert.Equal(t, variable.Value, "assigned-value")
}
func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) {
@ -92,15 +92,15 @@ func TestSetVariablesMutator(t *testing.T) {
Variables: map[string]*variable.Variable{
"a": {
Description: "resolved to default value",
- Default: &defaultValForA,
+ Default: defaultValForA,
},
"b": {
Description: "resolved from environment vairables",
- Default: &defaultValForB,
+ Default: defaultValForB,
},
"c": {
Description: "has already been assigned a value",
- Value: &valForC,
+ Value: valForC,
},
},
},
@ -110,7 +110,22 @@ func TestSetVariablesMutator(t *testing.T) {
diags := bundle.Apply(context.Background(), b, SetVariables())
require.NoError(t, diags.Error())
- assert.Equal(t, "default-a", *b.Config.Variables["a"].Value)
- assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value)
- assert.Equal(t, "assigned-val-c", *b.Config.Variables["c"].Value)
+ assert.Equal(t, "default-a", b.Config.Variables["a"].Value)
+ assert.Equal(t, "env-var-b", b.Config.Variables["b"].Value)
+ assert.Equal(t, "assigned-val-c", b.Config.Variables["c"].Value)
}
func TestSetComplexVariablesViaEnvVariablesIsNotAllowed(t *testing.T) {
defaultVal := "default"
variable := variable.Variable{
Description: "a test variable",
Default: defaultVal,
Type: variable.VariableTypeComplex,
}
// set value for variable as an environment variable
t.Setenv("BUNDLE_VAR_foo", "process-env")
diags := setVariable(context.Background(), &variable, "foo")
assert.ErrorContains(t, diags.Error(), "setting via environment variables (BUNDLE_VAR_foo) is not supported for complex variable foo")
}

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"errors" "errors"
"fmt" "fmt"
"io/fs"
"net/url" "net/url"
"os"
"path" "path"
"path/filepath" "path/filepath"
"strings" "strings"
@ -32,9 +32,7 @@ func (err ErrIsNotNotebook) Error() string {
return fmt.Sprintf("file at %s is not a notebook", err.path) return fmt.Sprintf("file at %s is not a notebook", err.path)
} }
type translatePaths struct { type translatePaths struct{}
seen map[string]string
}
// TranslatePaths converts paths to local notebook files into paths in the workspace file system. // TranslatePaths converts paths to local notebook files into paths in the workspace file system.
func TranslatePaths() bundle.Mutator { func TranslatePaths() bundle.Mutator {
@ -47,6 +45,18 @@ func (m *translatePaths) Name() string {
type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) (string, error) type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) (string, error)
// translateContext is a context for rewriting paths in a config.
// It is freshly instantiated on every mutator apply call.
// It provides access to the underlying bundle object such that
// it doesn't have to be passed around explicitly.
type translateContext struct {
b *bundle.Bundle
// seen is a map of local paths to their corresponding remote paths.
// If a local path has already been successfully resolved, we do not need to resolve it again.
seen map[string]string
}
// rewritePath converts a given relative path from the loaded config to a new path based on the passed rewriting function // rewritePath converts a given relative path from the loaded config to a new path based on the passed rewriting function
// //
// It takes these arguments: // It takes these arguments:
@ -56,14 +66,13 @@ type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) (
// This logic is different between regular files or notebooks. // This logic is different between regular files or notebooks.
// //
// The function returns an error if it is impossible to rewrite the given relative path. // The function returns an error if it is impossible to rewrite the given relative path.
func (m *translatePaths) rewritePath( func (t *translateContext) rewritePath(
dir string, dir string,
b *bundle.Bundle,
p *string, p *string,
fn rewriteFunc, fn rewriteFunc,
) error { ) error {
// We assume absolute paths point to a location in the workspace // We assume absolute paths point to a location in the workspace
if path.IsAbs(filepath.ToSlash(*p)) { if path.IsAbs(*p) {
return nil return nil
} }
@ -79,13 +88,14 @@ func (m *translatePaths) rewritePath(
// Local path is relative to the directory the resource was defined in. // Local path is relative to the directory the resource was defined in.
localPath := filepath.Join(dir, filepath.FromSlash(*p)) localPath := filepath.Join(dir, filepath.FromSlash(*p))
if interp, ok := m.seen[localPath]; ok { if interp, ok := t.seen[localPath]; ok {
*p = interp *p = interp
return nil return nil
} }
// Remote path must be relative to the bundle root. // Local path must be contained in the bundle root.
localRelPath, err := filepath.Rel(b.RootPath, localPath) // If it isn't, it won't be synchronized into the workspace.
localRelPath, err := filepath.Rel(t.b.RootPath, localPath)
if err != nil { if err != nil {
return err return err
} }
@ -94,22 +104,22 @@ func (m *translatePaths) rewritePath(
} }
// Prefix remote path with its remote root path. // Prefix remote path with its remote root path.
remotePath := path.Join(b.Config.Workspace.FilePath, filepath.ToSlash(localRelPath)) remotePath := path.Join(t.b.Config.Workspace.FilePath, filepath.ToSlash(localRelPath))
// Convert local path into workspace path via specified function. // Convert local path into workspace path via specified function.
interp, err := fn(*p, localPath, localRelPath, filepath.ToSlash(remotePath)) interp, err := fn(*p, localPath, localRelPath, remotePath)
if err != nil { if err != nil {
return err return err
} }
*p = interp *p = interp
m.seen[localPath] = interp t.seen[localPath] = interp
return nil return nil
} }
func translateNotebookPath(literal, localFullPath, localRelPath, remotePath string) (string, error) { func (t *translateContext) translateNotebookPath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
nb, _, err := notebook.Detect(localFullPath) nb, _, err := notebook.DetectWithFS(t.b.BundleRoot, filepath.ToSlash(localRelPath))
if os.IsNotExist(err) { if errors.Is(err, fs.ErrNotExist) {
return "", fmt.Errorf("notebook %s not found", literal) return "", fmt.Errorf("notebook %s not found", literal)
} }
if err != nil { if err != nil {
@ -123,9 +133,9 @@ func translateNotebookPath(literal, localFullPath, localRelPath, remotePath stri
return strings.TrimSuffix(remotePath, filepath.Ext(localFullPath)), nil return strings.TrimSuffix(remotePath, filepath.Ext(localFullPath)), nil
} }
func translateFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) { func (t *translateContext) translateFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
nb, _, err := notebook.Detect(localFullPath) nb, _, err := notebook.DetectWithFS(t.b.BundleRoot, filepath.ToSlash(localRelPath))
if os.IsNotExist(err) { if errors.Is(err, fs.ErrNotExist) {
return "", fmt.Errorf("file %s not found", literal) return "", fmt.Errorf("file %s not found", literal)
} }
if err != nil { if err != nil {
@ -137,8 +147,8 @@ func translateFilePath(literal, localFullPath, localRelPath, remotePath string)
return remotePath, nil return remotePath, nil
} }
func translateDirectoryPath(literal, localFullPath, localRelPath, remotePath string) (string, error) { func (t *translateContext) translateDirectoryPath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
info, err := os.Stat(localFullPath) info, err := t.b.BundleRoot.Stat(filepath.ToSlash(localRelPath))
if err != nil { if err != nil {
return "", err return "", err
} }
@ -148,20 +158,20 @@ func translateDirectoryPath(literal, localFullPath, localRelPath, remotePath str
return remotePath, nil return remotePath, nil
} }
func translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) { func (t *translateContext) translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) {
return localRelPath, nil return localRelPath, nil
} }
func translateNoOpWithPrefix(literal, localFullPath, localRelPath, remotePath string) (string, error) { func (t *translateContext) translateNoOpWithPrefix(literal, localFullPath, localRelPath, remotePath string) (string, error) {
if !strings.HasPrefix(localRelPath, ".") { if !strings.HasPrefix(localRelPath, ".") {
localRelPath = "." + string(filepath.Separator) + localRelPath localRelPath = "." + string(filepath.Separator) + localRelPath
} }
return localRelPath, nil return localRelPath, nil
} }
func (m *translatePaths) rewriteValue(b *bundle.Bundle, p dyn.Path, v dyn.Value, fn rewriteFunc, dir string) (dyn.Value, error) { func (t *translateContext) rewriteValue(p dyn.Path, v dyn.Value, fn rewriteFunc, dir string) (dyn.Value, error) {
out := v.MustString() out := v.MustString()
err := m.rewritePath(dir, b, &out, fn) err := t.rewritePath(dir, &out, fn)
if err != nil { if err != nil {
if target := (&ErrIsNotebook{}); errors.As(err, target) { if target := (&ErrIsNotebook{}); errors.As(err, target) {
return dyn.InvalidValue, fmt.Errorf(`expected a file for "%s" but got a notebook: %w`, p, target) return dyn.InvalidValue, fmt.Errorf(`expected a file for "%s" but got a notebook: %w`, p, target)
@ -175,15 +185,15 @@ func (m *translatePaths) rewriteValue(b *bundle.Bundle, p dyn.Path, v dyn.Value,
return dyn.NewValue(out, v.Location()), nil return dyn.NewValue(out, v.Location()), nil
} }
func (m *translatePaths) rewriteRelativeTo(b *bundle.Bundle, p dyn.Path, v dyn.Value, fn rewriteFunc, dir, fallback string) (dyn.Value, error) { func (t *translateContext) rewriteRelativeTo(p dyn.Path, v dyn.Value, fn rewriteFunc, dir, fallback string) (dyn.Value, error) {
nv, err := m.rewriteValue(b, p, v, fn, dir) nv, err := t.rewriteValue(p, v, fn, dir)
if err == nil { if err == nil {
return nv, nil return nv, nil
} }
// If we failed to rewrite the path, try to rewrite it relative to the fallback directory. // If we failed to rewrite the path, try to rewrite it relative to the fallback directory.
if fallback != "" { if fallback != "" {
nv, nerr := m.rewriteValue(b, p, v, fn, fallback) nv, nerr := t.rewriteValue(p, v, fn, fallback)
if nerr == nil { if nerr == nil {
// TODO: Emit a warning that this path should be rewritten. // TODO: Emit a warning that this path should be rewritten.
return nv, nil return nv, nil
@ -194,16 +204,19 @@ func (m *translatePaths) rewriteRelativeTo(b *bundle.Bundle, p dyn.Path, v dyn.V
} }
func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
m.seen = make(map[string]string) t := &translateContext{
b: b,
seen: make(map[string]string),
}
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
var err error var err error
for _, fn := range []func(*bundle.Bundle, dyn.Value) (dyn.Value, error){ for _, fn := range []func(dyn.Value) (dyn.Value, error){
m.applyJobTranslations, t.applyJobTranslations,
m.applyPipelineTranslations, t.applyPipelineTranslations,
m.applyArtifactTranslations, t.applyArtifactTranslations,
} { } {
v, err = fn(b, v) v, err = fn(v)
if err != nil { if err != nil {
return dyn.InvalidValue, err return dyn.InvalidValue, err
} }

View File

@ -3,36 +3,42 @@ package mutator
import (
"fmt"
- "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/dyn"
)
- func (m *translatePaths) applyArtifactTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) {
- var err error
+ type artifactRewritePattern struct {
+ pattern dyn.Pattern
+ fn rewriteFunc
+ }
+ func (t *translateContext) artifactRewritePatterns() []artifactRewritePattern {
// Base pattern to match all artifacts.
base := dyn.NewPattern(
dyn.Key("artifacts"),
dyn.AnyKey(),
)
- for _, t := range []struct {
- pattern dyn.Pattern
- fn rewriteFunc
- }{
+ // Compile list of configuration paths to rewrite.
+ return []artifactRewritePattern{
{
base.Append(dyn.Key("path")),
- translateNoOp,
+ t.translateNoOp,
},
- } {
- v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+ }
+ }
+ func (t *translateContext) applyArtifactTranslations(v dyn.Value) (dyn.Value, error) {
+ var err error
+ for _, rewritePattern := range t.artifactRewritePatterns() {
+ v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
key := p[1].Key()
dir, err := v.Location().Directory()
if err != nil {
return dyn.InvalidValue, fmt.Errorf("unable to determine directory for artifact %s: %w", key, err)
}
- return m.rewriteRelativeTo(b, p, v, t.fn, dir, "")
+ return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, "")
})
if err != nil {
return dyn.InvalidValue, err

View File

@ -4,7 +4,6 @@ import (
"fmt" "fmt"
"slices" "slices"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/libraries"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
) )
@ -19,55 +18,42 @@ func noSkipRewrite(string) bool {
return false return false
} }
func rewritePatterns(base dyn.Pattern) []jobRewritePattern { func rewritePatterns(t *translateContext, base dyn.Pattern) []jobRewritePattern {
return []jobRewritePattern{ return []jobRewritePattern{
{ {
base.Append(dyn.Key("notebook_task"), dyn.Key("notebook_path")), base.Append(dyn.Key("notebook_task"), dyn.Key("notebook_path")),
translateNotebookPath, t.translateNotebookPath,
noSkipRewrite, noSkipRewrite,
}, },
{ {
base.Append(dyn.Key("spark_python_task"), dyn.Key("python_file")), base.Append(dyn.Key("spark_python_task"), dyn.Key("python_file")),
translateFilePath, t.translateFilePath,
noSkipRewrite, noSkipRewrite,
}, },
{ {
base.Append(dyn.Key("dbt_task"), dyn.Key("project_directory")), base.Append(dyn.Key("dbt_task"), dyn.Key("project_directory")),
translateDirectoryPath, t.translateDirectoryPath,
noSkipRewrite, noSkipRewrite,
}, },
{ {
base.Append(dyn.Key("sql_task"), dyn.Key("file"), dyn.Key("path")), base.Append(dyn.Key("sql_task"), dyn.Key("file"), dyn.Key("path")),
translateFilePath, t.translateFilePath,
noSkipRewrite, noSkipRewrite,
}, },
{ {
base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("whl")), base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("whl")),
translateNoOp, t.translateNoOp,
noSkipRewrite, noSkipRewrite,
}, },
{ {
base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("jar")), base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("jar")),
translateNoOp, t.translateNoOp,
noSkipRewrite, noSkipRewrite,
}, },
} }
} }
func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) { func (t *translateContext) jobRewritePatterns() []jobRewritePattern {
fallback, err := gatherFallbackPaths(v, "jobs")
if err != nil {
return dyn.InvalidValue, err
}
// Do not translate job task paths if using Git source
var ignore []string
for key, job := range b.Config.Resources.Jobs {
if job.GitSource != nil {
ignore = append(ignore, key)
}
}
// Base pattern to match all tasks in all jobs. // Base pattern to match all tasks in all jobs.
base := dyn.NewPattern( base := dyn.NewPattern(
dyn.Key("resources"), dyn.Key("resources"),
@ -90,19 +76,38 @@ func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dy
dyn.Key("dependencies"), dyn.Key("dependencies"),
dyn.AnyIndex(), dyn.AnyIndex(),
), ),
translateNoOpWithPrefix, t.translateNoOpWithPrefix,
func(s string) bool { func(s string) bool {
return !libraries.IsEnvironmentDependencyLocal(s) return !libraries.IsEnvironmentDependencyLocal(s)
}, },
}, },
} }
taskPatterns := rewritePatterns(base)
forEachPatterns := rewritePatterns(base.Append(dyn.Key("for_each_task"), dyn.Key("task"))) taskPatterns := rewritePatterns(t, base)
forEachPatterns := rewritePatterns(t, base.Append(dyn.Key("for_each_task"), dyn.Key("task")))
allPatterns := append(taskPatterns, jobEnvironmentsPatterns...) allPatterns := append(taskPatterns, jobEnvironmentsPatterns...)
allPatterns = append(allPatterns, forEachPatterns...) allPatterns = append(allPatterns, forEachPatterns...)
return allPatterns
}
for _, t := range allPatterns { func (t *translateContext) applyJobTranslations(v dyn.Value) (dyn.Value, error) {
v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { var err error
fallback, err := gatherFallbackPaths(v, "jobs")
if err != nil {
return dyn.InvalidValue, err
}
// Do not translate job task paths if using Git source
var ignore []string
for key, job := range t.b.Config.Resources.Jobs {
if job.GitSource != nil {
ignore = append(ignore, key)
}
}
for _, rewritePattern := range t.jobRewritePatterns() {
v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
key := p[2].Key() key := p[2].Key()
// Skip path translation if the job is using git source. // Skip path translation if the job is using git source.
@ -116,10 +121,10 @@ func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dy
} }
sv := v.MustString() sv := v.MustString()
if t.skipRewrite(sv) { if rewritePattern.skipRewrite(sv) {
return v, nil return v, nil
} }
return m.rewriteRelativeTo(b, p, v, t.fn, dir, fallback[key]) return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, fallback[key])
}) })
if err != nil { if err != nil {
return dyn.InvalidValue, err return dyn.InvalidValue, err
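A minimal sketch of the pattern used in the hunk above: the rewrite table is built from method values bound to a shared context, so every callback can reach the same state without extra arguments. The names below are illustrative stand-ins, not the CLI's actual types.

package main

import (
	"fmt"
	"strings"
)

// rewriteFunc mirrors the shape of the translation callbacks.
type rewriteFunc func(path string) (string, error)

// ctx stands in for translateContext: shared state for all rewrites.
type ctx struct {
	prefix string
}

// relativize is a method; taking its method value (c.relativize) closes over
// the receiver, which is what the pattern table relies on.
func (c *ctx) relativize(path string) (string, error) {
	return c.prefix + "/" + strings.TrimPrefix(path, "./"), nil
}

type pattern struct {
	name string
	fn   rewriteFunc
}

// patterns builds the rewrite table; each fn is bound to the same context,
// analogous to t.translateNotebookPath and friends above.
func (c *ctx) patterns() []pattern {
	return []pattern{
		{"notebook_path", c.relativize},
		{"python_file", c.relativize},
	}
}

func main() {
	c := &ctx{prefix: "/Workspace/files"}
	for _, p := range c.patterns() {
		out, _ := p.fn("./src/nb.py")
		fmt.Println(p.name, "->", out)
	}
}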

View File

@ -3,16 +3,15 @@ package mutator
import ( import (
"fmt" "fmt"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
) )
func (m *translatePaths) applyPipelineTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) { type pipelineRewritePattern struct {
fallback, err := gatherFallbackPaths(v, "pipelines") pattern dyn.Pattern
if err != nil { fn rewriteFunc
return dyn.InvalidValue, err }
}
func (t *translateContext) pipelineRewritePatterns() []pipelineRewritePattern {
// Base pattern to match all libraries in all pipelines. // Base pattern to match all libraries in all pipelines.
base := dyn.NewPattern( base := dyn.NewPattern(
dyn.Key("resources"), dyn.Key("resources"),
@ -22,27 +21,36 @@ func (m *translatePaths) applyPipelineTranslations(b *bundle.Bundle, v dyn.Value
dyn.AnyIndex(), dyn.AnyIndex(),
) )
for _, t := range []struct { // Compile list of configuration paths to rewrite.
pattern dyn.Pattern return []pipelineRewritePattern{
fn rewriteFunc
}{
{ {
base.Append(dyn.Key("notebook"), dyn.Key("path")), base.Append(dyn.Key("notebook"), dyn.Key("path")),
translateNotebookPath, t.translateNotebookPath,
}, },
{ {
base.Append(dyn.Key("file"), dyn.Key("path")), base.Append(dyn.Key("file"), dyn.Key("path")),
translateFilePath, t.translateFilePath,
}, },
} { }
v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { }
func (t *translateContext) applyPipelineTranslations(v dyn.Value) (dyn.Value, error) {
var err error
fallback, err := gatherFallbackPaths(v, "pipelines")
if err != nil {
return dyn.InvalidValue, err
}
for _, rewritePattern := range t.pipelineRewritePatterns() {
v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
key := p[2].Key() key := p[2].Key()
dir, err := v.Location().Directory() dir, err := v.Location().Directory()
if err != nil { if err != nil {
return dyn.InvalidValue, fmt.Errorf("unable to determine directory for pipeline %s: %w", key, err) return dyn.InvalidValue, fmt.Errorf("unable to determine directory for pipeline %s: %w", key, err)
} }
return m.rewriteRelativeTo(b, p, v, t.fn, dir, fallback[key]) return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, fallback[key])
}) })
if err != nil { if err != nil {
return dyn.InvalidValue, err return dyn.InvalidValue, err

View File

@ -12,6 +12,7 @@ import (
"github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest" "github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/databricks/databricks-sdk-go/service/pipelines"
@ -37,7 +38,8 @@ func touchEmptyFile(t *testing.T, path string) {
func TestTranslatePathsSkippedWithGitSource(t *testing.T) { func TestTranslatePathsSkippedWithGitSource(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
FilePath: "/bundle", FilePath: "/bundle",
@ -107,7 +109,8 @@ func TestTranslatePaths(t *testing.T) {
touchEmptyFile(t, filepath.Join(dir, "dist", "task.jar")) touchEmptyFile(t, filepath.Join(dir, "dist", "task.jar"))
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
FilePath: "/bundle", FilePath: "/bundle",
@ -274,7 +277,8 @@ func TestTranslatePathsInSubdirectories(t *testing.T) {
touchEmptyFile(t, filepath.Join(dir, "job", "my_dbt_project", "dbt_project.yml")) touchEmptyFile(t, filepath.Join(dir, "job", "my_dbt_project", "dbt_project.yml"))
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
FilePath: "/bundle", FilePath: "/bundle",
@ -368,7 +372,8 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
FilePath: "/bundle", FilePath: "/bundle",
@ -401,7 +406,8 @@ func TestJobNotebookDoesNotExistError(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Resources: config.Resources{ Resources: config.Resources{
Jobs: map[string]*resources.Job{ Jobs: map[string]*resources.Job{
@ -431,7 +437,8 @@ func TestJobFileDoesNotExistError(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Resources: config.Resources{ Resources: config.Resources{
Jobs: map[string]*resources.Job{ Jobs: map[string]*resources.Job{
@ -461,7 +468,8 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Resources: config.Resources{ Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{ Pipelines: map[string]*resources.Pipeline{
@ -491,7 +499,8 @@ func TestPipelineFileDoesNotExistError(t *testing.T) {
dir := t.TempDir() dir := t.TempDir()
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Resources: config.Resources{ Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{ Pipelines: map[string]*resources.Pipeline{
@ -522,7 +531,8 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) {
touchNotebookFile(t, filepath.Join(dir, "my_notebook.py")) touchNotebookFile(t, filepath.Join(dir, "my_notebook.py"))
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
FilePath: "/bundle", FilePath: "/bundle",
@ -556,7 +566,8 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) {
touchEmptyFile(t, filepath.Join(dir, "my_file.py")) touchEmptyFile(t, filepath.Join(dir, "my_file.py"))
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
FilePath: "/bundle", FilePath: "/bundle",
@ -590,7 +601,8 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) {
touchEmptyFile(t, filepath.Join(dir, "my_file.py")) touchEmptyFile(t, filepath.Join(dir, "my_file.py"))
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
FilePath: "/bundle", FilePath: "/bundle",
@ -624,7 +636,8 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) {
touchNotebookFile(t, filepath.Join(dir, "my_notebook.py")) touchNotebookFile(t, filepath.Join(dir, "my_notebook.py"))
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
FilePath: "/bundle", FilePath: "/bundle",
@ -659,7 +672,8 @@ func TestTranslatePathJobEnvironments(t *testing.T) {
touchEmptyFile(t, filepath.Join(dir, "env2.py")) touchEmptyFile(t, filepath.Join(dir, "env2.py"))
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: dir, RootPath: dir,
BundleRoot: vfs.MustNew(dir),
Config: config.Root{ Config: config.Root{
Resources: config.Resources{ Resources: config.Resources{
Jobs: map[string]*resources.Job{ Jobs: map[string]*resources.Job{

View File

@ -74,6 +74,10 @@ func Load(path string) (*Root, diag.Diagnostics) {
return nil, diag.FromErr(err) return nil, diag.FromErr(err)
} }
return LoadFromBytes(path, raw)
}
func LoadFromBytes(path string, raw []byte) (*Root, diag.Diagnostics) {
r := Root{} r := Root{}
// Load configuration tree from YAML. // Load configuration tree from YAML.
@ -263,6 +267,11 @@ func (r *Root) InitializeVariables(vars []string) error {
if _, ok := r.Variables[name]; !ok { if _, ok := r.Variables[name]; !ok {
return fmt.Errorf("variable %s has not been defined", name) return fmt.Errorf("variable %s has not been defined", name)
} }
if r.Variables[name].IsComplex() {
return fmt.Errorf("setting variables of complex type via --var flag is not supported: %s", name)
}
err := r.Variables[name].Set(val) err := r.Variables[name].Set(val)
if err != nil { if err != nil {
return fmt.Errorf("failed to assign %s to %s: %s", val, name, err) return fmt.Errorf("failed to assign %s to %s: %s", val, name, err)
@ -329,15 +338,38 @@ func (r *Root) MergeTargetOverrides(name string) error {
"resources", "resources",
"sync", "sync",
"permissions", "permissions",
"variables",
} { } {
if root, err = mergeField(root, target, f); err != nil { if root, err = mergeField(root, target, f); err != nil {
return err return err
} }
} }
// Merge `variables`. This field must be overwritten if set, not merged.
if v := target.Get("variables"); v.Kind() != dyn.KindInvalid {
_, err = dyn.Map(v, ".", dyn.Foreach(func(p dyn.Path, variable dyn.Value) (dyn.Value, error) {
varPath := dyn.MustPathFromString("variables").Append(p...)
vDefault := variable.Get("default")
if vDefault.Kind() != dyn.KindInvalid {
defaultPath := varPath.Append(dyn.Key("default"))
root, err = dyn.SetByPath(root, defaultPath, vDefault)
}
vLookup := variable.Get("lookup")
if vLookup.Kind() != dyn.KindInvalid {
lookupPath := varPath.Append(dyn.Key("lookup"))
root, err = dyn.SetByPath(root, lookupPath, vLookup)
}
return root, err
}))
if err != nil {
return err
}
}
// Merge `run_as`. This field must be overwritten if set, not merged. // Merge `run_as`. This field must be overwritten if set, not merged.
if v := target.Get("run_as"); v != dyn.NilValue { if v := target.Get("run_as"); v.Kind() != dyn.KindInvalid {
root, err = dyn.Set(root, "run_as", v) root, err = dyn.Set(root, "run_as", v)
if err != nil { if err != nil {
return err return err
@ -345,7 +377,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
} }
// Below, we're setting fields on the bundle key, so make sure it exists. // Below, we're setting fields on the bundle key, so make sure it exists.
if root.Get("bundle") == dyn.NilValue { if root.Get("bundle").Kind() == dyn.KindInvalid {
root, err = dyn.Set(root, "bundle", dyn.NewValue(map[string]dyn.Value{}, dyn.Location{})) root, err = dyn.Set(root, "bundle", dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}))
if err != nil { if err != nil {
return err return err
@ -353,7 +385,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
} }
// Merge `mode`. This field must be overwritten if set, not merged. // Merge `mode`. This field must be overwritten if set, not merged.
if v := target.Get("mode"); v != dyn.NilValue { if v := target.Get("mode"); v.Kind() != dyn.KindInvalid {
root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("mode")), v) root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("mode")), v)
if err != nil { if err != nil {
return err return err
@ -361,7 +393,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
} }
// Merge `compute_id`. This field must be overwritten if set, not merged. // Merge `compute_id`. This field must be overwritten if set, not merged.
if v := target.Get("compute_id"); v != dyn.NilValue { if v := target.Get("compute_id"); v.Kind() != dyn.KindInvalid {
root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("compute_id")), v) root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("compute_id")), v)
if err != nil { if err != nil {
return err return err
@ -369,7 +401,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
} }
// Merge `git`. // Merge `git`.
if v := target.Get("git"); v != dyn.NilValue { if v := target.Get("git"); v.Kind() != dyn.KindInvalid {
ref, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("git"))) ref, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("git")))
if err != nil { if err != nil {
ref = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}) ref = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{})
@ -382,7 +414,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
} }
// If the branch was overridden, we need to clear the inferred flag. // If the branch was overridden, we need to clear the inferred flag.
if branch := v.Get("branch"); branch != dyn.NilValue { if branch := v.Get("branch"); branch.Kind() != dyn.KindInvalid {
out, err = dyn.SetByPath(out, dyn.NewPath(dyn.Key("inferred")), dyn.NewValue(false, dyn.Location{})) out, err = dyn.SetByPath(out, dyn.NewPath(dyn.Key("inferred")), dyn.NewValue(false, dyn.Location{}))
if err != nil { if err != nil {
return err return err
@ -410,12 +442,12 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
// For each target, rewrite the variables block. // For each target, rewrite the variables block.
return dyn.Map(v, "targets", dyn.Foreach(func(_ dyn.Path, target dyn.Value) (dyn.Value, error) { return dyn.Map(v, "targets", dyn.Foreach(func(_ dyn.Path, target dyn.Value) (dyn.Value, error) {
// Confirm it has a variables block. // Confirm it has a variables block.
if target.Get("variables") == dyn.NilValue { if target.Get("variables").Kind() == dyn.KindInvalid {
return target, nil return target, nil
} }
// For each variable, normalize its contents if it is a single string. // For each variable, normalize its contents if it is a single string.
return dyn.Map(target, "variables", dyn.Foreach(func(_ dyn.Path, variable dyn.Value) (dyn.Value, error) { return dyn.Map(target, "variables", dyn.Foreach(func(p dyn.Path, variable dyn.Value) (dyn.Value, error) {
switch variable.Kind() { switch variable.Kind() {
case dyn.KindString, dyn.KindBool, dyn.KindFloat, dyn.KindInt: case dyn.KindString, dyn.KindBool, dyn.KindFloat, dyn.KindInt:
@ -426,6 +458,22 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
"default": variable, "default": variable,
}, variable.Location()), nil }, variable.Location()), nil
case dyn.KindMap, dyn.KindSequence:
// Check if the original definition of the variable has a type field.
typeV, err := dyn.GetByPath(v, p.Append(dyn.Key("type")))
if err != nil {
return variable, nil
}
if typeV.MustString() == "complex" {
return dyn.NewValue(map[string]dyn.Value{
"type": typeV,
"default": variable,
}, variable.Location()), nil
}
return variable, nil
default: default:
return variable, nil return variable, nil
} }
@ -440,15 +488,19 @@ func validateVariableOverrides(root, target dyn.Value) (err error) {
var tv map[string]variable.Variable var tv map[string]variable.Variable
// Collect variables from the root. // Collect variables from the root.
err = convert.ToTyped(&rv, root.Get("variables")) if v := root.Get("variables"); v.Kind() != dyn.KindInvalid {
if err != nil { err = convert.ToTyped(&rv, v)
return fmt.Errorf("unable to collect variables from root: %w", err) if err != nil {
return fmt.Errorf("unable to collect variables from root: %w", err)
}
} }
// Collect variables from the target. // Collect variables from the target.
err = convert.ToTyped(&tv, target.Get("variables")) if v := target.Get("variables"); v.Kind() != dyn.KindInvalid {
if err != nil { err = convert.ToTyped(&tv, v)
return fmt.Errorf("unable to collect variables from target: %w", err) if err != nil {
return fmt.Errorf("unable to collect variables from target: %w", err)
}
} }
// Check that all variables in the target exist in the root. // Check that all variables in the target exist in the root.
@ -471,3 +523,9 @@ func (r Root) GetLocation(path string) dyn.Location {
} }
return v.Location() return v.Location()
} }
// Value returns the dynamic configuration value of the root object. This value
// is the source of truth and is kept in sync with values in the typed configuration.
func (r Root) Value() dyn.Value {
return r.value
}
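A simplified sketch of the override semantics introduced above: a target may overwrite a variable's default (and lookup), while the description and type from the root definition are preserved. The types below are illustrative stand-ins rather than the bundle's actual config structs.

package main

import "fmt"

// variable is a simplified stand-in for a bundle variable definition.
type variable struct {
	Description string
	Default     any
	Lookup      string
}

// mergeTargetVariables mimics the behavior above: for each variable overridden
// in a target, only the default and lookup values are overwritten.
func mergeTargetVariables(root, target map[string]variable) map[string]variable {
	for name, override := range target {
		base, ok := root[name]
		if !ok {
			continue // unknown overrides are caught by validation elsewhere
		}
		if override.Default != nil {
			base.Default = override.Default
		}
		if override.Lookup != "" {
			base.Lookup = override.Lookup
		}
		root[name] = base
	}
	return root
}

func main() {
	root := map[string]variable{
		"foo": {Description: "foo var", Default: "foo"},
	}
	target := map[string]variable{
		"foo": {Description: "ignored", Default: "bar"},
	}
	fmt.Printf("%+v\n", mergeTargetVariables(root, target)["foo"])
	// {Description:foo var Default:bar Lookup:}
}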

View File

@ -51,7 +51,7 @@ func TestInitializeVariables(t *testing.T) {
root := &Root{ root := &Root{
Variables: map[string]*variable.Variable{ Variables: map[string]*variable.Variable{
"foo": { "foo": {
Default: &fooDefault, Default: fooDefault,
Description: "an optional variable since default is defined", Description: "an optional variable since default is defined",
}, },
"bar": { "bar": {
@ -62,8 +62,8 @@ func TestInitializeVariables(t *testing.T) {
err := root.InitializeVariables([]string{"foo=123", "bar=456"}) err := root.InitializeVariables([]string{"foo=123", "bar=456"})
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, "123", *(root.Variables["foo"].Value)) assert.Equal(t, "123", (root.Variables["foo"].Value))
assert.Equal(t, "456", *(root.Variables["bar"].Value)) assert.Equal(t, "456", (root.Variables["bar"].Value))
} }
func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) { func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) {
@ -77,7 +77,7 @@ func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) {
err := root.InitializeVariables([]string{"foo=123=567"}) err := root.InitializeVariables([]string{"foo=123=567"})
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, "123=567", *(root.Variables["foo"].Value)) assert.Equal(t, "123=567", (root.Variables["foo"].Value))
} }
func TestInitializeVariablesInvalidFormat(t *testing.T) { func TestInitializeVariablesInvalidFormat(t *testing.T) {
@ -119,3 +119,69 @@ func TestRootMergeTargetOverridesWithMode(t *testing.T) {
require.NoError(t, root.MergeTargetOverrides("development")) require.NoError(t, root.MergeTargetOverrides("development"))
assert.Equal(t, Development, root.Bundle.Mode) assert.Equal(t, Development, root.Bundle.Mode)
} }
func TestInitializeComplexVariablesViaFlagIsNotAllowed(t *testing.T) {
root := &Root{
Variables: map[string]*variable.Variable{
"foo": {
Type: variable.VariableTypeComplex,
},
},
}
err := root.InitializeVariables([]string{"foo=123"})
assert.ErrorContains(t, err, "setting variables of complex type via --var flag is not supported: foo")
}
func TestRootMergeTargetOverridesWithVariables(t *testing.T) {
root := &Root{
Bundle: Bundle{},
Variables: map[string]*variable.Variable{
"foo": {
Default: "foo",
Description: "foo var",
},
"foo2": {
Default: "foo2",
Description: "foo2 var",
},
"complex": {
Type: variable.VariableTypeComplex,
Description: "complex var",
Default: map[string]interface{}{
"key": "value",
},
},
},
Targets: map[string]*Target{
"development": {
Variables: map[string]*variable.Variable{
"foo": {
Default: "bar",
Description: "wrong",
},
"complex": {
Type: "wrong",
Description: "wrong",
Default: map[string]interface{}{
"key1": "value1",
},
},
},
},
},
}
root.initializeDynamicValue()
require.NoError(t, root.MergeTargetOverrides("development"))
assert.Equal(t, "bar", root.Variables["foo"].Default)
assert.Equal(t, "foo var", root.Variables["foo"].Description)
assert.Equal(t, "foo2", root.Variables["foo2"].Default)
assert.Equal(t, "foo2 var", root.Variables["foo2"].Description)
assert.Equal(t, map[string]interface{}{
"key1": "value1",
}, root.Variables["complex"].Default)
assert.Equal(t, "complex var", root.Variables["complex"].Description)
}

View File

@ -8,7 +8,6 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/fileset" "github.com/databricks/cli/libs/fileset"
"github.com/databricks/cli/libs/vfs"
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
) )
@ -51,7 +50,7 @@ func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (di
index := i index := i
p := pattern p := pattern
errs.Go(func() error { errs.Go(func() error {
fs, err := fileset.NewGlobSet(vfs.MustNew(rb.RootPath()), []string{p}) fs, err := fileset.NewGlobSet(rb.BundleRoot(), []string{p})
if err != nil { if err != nil {
return err return err
} }

View File

@ -2,12 +2,27 @@ package variable
import ( import (
"fmt" "fmt"
"reflect"
)
// We are using `any` because, since the introduction of complex variables,
// variables can be of any type.
// The type alias is used to make the code easier to understand.
type VariableValue = any
type VariableType string
const (
VariableTypeComplex VariableType = "complex"
) )
// An input variable for the bundle config // An input variable for the bundle config
type Variable struct { type Variable struct {
// The type of the variable. This is used to validate the value of the variable.
Type VariableType `json:"type,omitempty"`
// A default value which then makes the variable optional // A default value which then makes the variable optional
Default *string `json:"default,omitempty"` Default VariableValue `json:"default,omitempty"`
// Documentation for this input variable // Documentation for this input variable
Description string `json:"description,omitempty"` Description string `json:"description,omitempty"`
@ -21,7 +36,7 @@ type Variable struct {
// 4. Default value defined in variable definition // 4. Default value defined in variable definition
// 5. Throw error, since if no default value is defined, then the variable // 5. Throw error, since if no default value is defined, then the variable
// is required // is required
Value *string `json:"value,omitempty" bundle:"readonly"` Value VariableValue `json:"value,omitempty" bundle:"readonly"`
// The value of this field will be used to lookup the resource by name // The value of this field will be used to lookup the resource by name
// And assign the value of the variable to ID of the resource found. // And assign the value of the variable to ID of the resource found.
@ -39,10 +54,24 @@ func (v *Variable) HasValue() bool {
return v.Value != nil return v.Value != nil
} }
func (v *Variable) Set(val string) error { func (v *Variable) Set(val VariableValue) error {
if v.HasValue() { if v.HasValue() {
return fmt.Errorf("variable has already been assigned value: %s", *v.Value) return fmt.Errorf("variable has already been assigned value: %s", v.Value)
} }
v.Value = &val
rv := reflect.ValueOf(val)
switch rv.Kind() {
case reflect.Struct, reflect.Array, reflect.Slice, reflect.Map:
if v.Type != VariableTypeComplex {
return fmt.Errorf("variable type is not complex")
}
}
v.Value = val
return nil return nil
} }
func (v *Variable) IsComplex() bool {
return v.Type == VariableTypeComplex
}
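The kind check added to Set above rejects composite values unless the variable is declared with type complex. A standalone sketch of that guard, using only the standard library's reflect package:

package main

import (
	"fmt"
	"reflect"
)

// isComposite reports whether a value is a struct, array, slice, or map,
// i.e. the kinds that only a variable of type "complex" may hold.
func isComposite(val any) bool {
	switch reflect.ValueOf(val).Kind() {
	case reflect.Struct, reflect.Array, reflect.Slice, reflect.Map:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isComposite("plain string"))           // false
	fmt.Println(isComposite(map[string]any{"k": "v"})) // true
	fmt.Println(isComposite([]int{1, 2, 3}))           // true
}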

View File

@ -2,7 +2,9 @@ package files
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"io/fs"
"os" "os"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
@ -67,7 +69,7 @@ func deleteSnapshotFile(ctx context.Context, b *bundle.Bundle) error {
return err return err
} }
err = os.Remove(sp) err = os.Remove(sp)
if err != nil && !os.IsNotExist(err) { if err != nil && !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf("failed to destroy sync snapshot file: %s", err) return fmt.Errorf("failed to destroy sync snapshot file: %s", err)
} }
return nil return nil
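The change from os.IsNotExist to errors.Is(err, fs.ErrNotExist) matters once errors are wrapped with %w: errors.Is unwraps the chain, while os.IsNotExist does not. A small demonstration:

package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
)

func main() {
	_, err := os.Stat("definitely-missing-file")

	// Both report true for the bare *PathError...
	fmt.Println(os.IsNotExist(err), errors.Is(err, fs.ErrNotExist))

	// ...but only errors.Is keeps working once the error is wrapped.
	wrapped := fmt.Errorf("failed to destroy sync snapshot file: %w", err)
	fmt.Println(os.IsNotExist(wrapped), errors.Is(wrapped, fs.ErrNotExist))
	// Output:
	// true true
	// false true
}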

View File

@ -6,7 +6,6 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/sync" "github.com/databricks/cli/libs/sync"
"github.com/databricks/cli/libs/vfs"
) )
func GetSync(ctx context.Context, rb bundle.ReadOnlyBundle) (*sync.Sync, error) { func GetSync(ctx context.Context, rb bundle.ReadOnlyBundle) (*sync.Sync, error) {
@ -29,7 +28,7 @@ func GetSyncOptions(ctx context.Context, rb bundle.ReadOnlyBundle) (*sync.SyncOp
} }
opts := &sync.SyncOptions{ opts := &sync.SyncOptions{
LocalPath: vfs.MustNew(rb.RootPath()), LocalPath: rb.BundleRoot(),
RemotePath: rb.Config().Workspace.FilePath, RemotePath: rb.Config().Workspace.FilePath,
Include: includes, Include: includes,
Exclude: rb.Config().Sync.Exclude, Exclude: rb.Config().Sync.Exclude,

View File

@ -26,7 +26,7 @@ func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return diag.FromErr(err) return diag.FromErr(err)
} }
err = sync.RunOnce(ctx) b.Files, err = sync.RunOnce(ctx)
if err != nil { if err != nil {
if errors.Is(err, fs.ErrPermission) { if errors.Is(err, fs.ErrPermission) {
return permissions.ReportPermissionDenied(ctx, b, b.Config.Workspace.StatePath) return permissions.ReportPermissionDenied(ctx, b, b.Config.Workspace.StatePath)

View File

@ -6,7 +6,6 @@ import (
"fmt" "fmt"
"io" "io"
"io/fs" "io/fs"
"os"
"path/filepath" "path/filepath"
"time" "time"
@ -59,8 +58,8 @@ type entry struct {
info fs.FileInfo info fs.FileInfo
} }
func newEntry(path string) *entry { func newEntry(root vfs.Path, path string) *entry {
info, err := os.Stat(path) info, err := root.Stat(path)
if err != nil { if err != nil {
return &entry{path, nil} return &entry{path, nil}
} }
@ -111,11 +110,10 @@ func FromSlice(files []fileset.File) (Filelist, error) {
return f, nil return f, nil
} }
func (f Filelist) ToSlice(basePath string) []fileset.File { func (f Filelist) ToSlice(root vfs.Path) []fileset.File {
var files []fileset.File var files []fileset.File
root := vfs.MustNew(basePath)
for _, file := range f { for _, file := range f {
entry := newEntry(filepath.Join(basePath, file.LocalPath)) entry := newEntry(root, filepath.ToSlash(file.LocalPath))
// Snapshots created with versions <= v0.220.0 use platform-specific // Snapshots created with versions <= v0.220.0 use platform-specific
// paths (i.e. with backslashes). Files returned by [libs/fileset] always // paths (i.e. with backslashes). Files returned by [libs/fileset] always
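The entry constructor now stats through the bundle root rather than joining and stat'ing an OS path, so lookups stay relative to one rooted filesystem. A rough standard-library analogue using os.DirFS and fs.Stat (the vfs.Path interface itself is not reproduced here):

package main

import (
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
)

func main() {
	// Rooted filesystem: names passed to fs.Stat are slash-separated and
	// relative to the root, similar to stat'ing through a bundle root.
	root := os.DirFS(os.TempDir())

	name := "example.txt"
	_ = os.WriteFile(filepath.Join(os.TempDir(), name), []byte("x"), 0o644)

	info, err := fs.Stat(root, name)
	if err != nil {
		fmt.Println("stat failed:", err)
		return
	}
	fmt.Println(info.Name(), info.Size())
}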

View File

@ -85,7 +85,7 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic
} }
log.Infof(ctx, "Creating new snapshot") log.Infof(ctx, "Creating new snapshot")
snapshot, err := sync.NewSnapshot(state.Files.ToSlice(b.RootPath), opts) snapshot, err := sync.NewSnapshot(state.Files.ToSlice(b.BundleRoot), opts)
if err != nil { if err != nil {
return diag.FromErr(err) return diag.FromErr(err)
} }

View File

@ -4,7 +4,9 @@ import (
"bytes" "bytes"
"context" "context"
"encoding/json" "encoding/json"
"errors"
"io" "io"
"io/fs"
"os" "os"
"testing" "testing"
@ -15,6 +17,7 @@ import (
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/sync" "github.com/databricks/cli/libs/sync"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/iam" "github.com/databricks/databricks-sdk-go/service/iam"
"github.com/stretchr/testify/mock" "github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -57,8 +60,10 @@ func testStatePull(t *testing.T, opts statePullOpts) {
return f, nil return f, nil
}} }}
tmpDir := t.TempDir()
b := &bundle.Bundle{ b := &bundle.Bundle{
RootPath: t.TempDir(), RootPath: tmpDir,
BundleRoot: vfs.MustNew(tmpDir),
Config: config.Root{ Config: config.Root{
Bundle: config.Bundle{ Bundle: config.Bundle{
Target: "default", Target: "default",
@ -270,7 +275,7 @@ func TestStatePullNoState(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
_, err = os.Stat(statePath) _, err = os.Stat(statePath)
require.True(t, os.IsNotExist(err)) require.True(t, errors.Is(err, fs.ErrNotExist))
} }
func TestStatePullOlderState(t *testing.T) { func TestStatePullOlderState(t *testing.T) {

View File

@ -32,7 +32,8 @@ func TestFromSlice(t *testing.T) {
func TestToSlice(t *testing.T) { func TestToSlice(t *testing.T) {
tmpDir := t.TempDir() tmpDir := t.TempDir()
fileset := fileset.New(vfs.MustNew(tmpDir)) root := vfs.MustNew(tmpDir)
fileset := fileset.New(root)
testutil.Touch(t, tmpDir, "test1.py") testutil.Touch(t, tmpDir, "test1.py")
testutil.Touch(t, tmpDir, "test2.py") testutil.Touch(t, tmpDir, "test2.py")
testutil.Touch(t, tmpDir, "test3.py") testutil.Touch(t, tmpDir, "test3.py")
@ -44,7 +45,7 @@ func TestToSlice(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.Len(t, f, 3) require.Len(t, f, 3)
s := f.ToSlice(tmpDir) s := f.ToSlice(root)
require.Len(t, s, 3) require.Len(t, s, 3)
for _, file := range s { for _, file := range s {

View File

@ -4,12 +4,13 @@ import (
"bytes" "bytes"
"context" "context"
"encoding/json" "encoding/json"
"errors"
"io" "io"
"io/fs"
"os" "os"
"time" "time"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/cli/internal/build" "github.com/databricks/cli/internal/build"
"github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
@ -38,19 +39,8 @@ func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnost
state.CliVersion = build.GetInfo().Version state.CliVersion = build.GetInfo().Version
state.Version = DeploymentStateVersion state.Version = DeploymentStateVersion
// Get the current file list. // Update the state with the current list of synced files.
sync, err := files.GetSync(ctx, bundle.ReadOnly(b)) fl, err := FromSlice(b.Files)
if err != nil {
return diag.FromErr(err)
}
files, err := sync.GetFileList(ctx)
if err != nil {
return diag.FromErr(err)
}
// Update the state with the current file list.
fl, err := FromSlice(files)
if err != nil { if err != nil {
return diag.FromErr(err) return diag.FromErr(err)
} }
@ -95,7 +85,7 @@ func load(ctx context.Context, b *bundle.Bundle) (*DeploymentState, error) {
log.Infof(ctx, "Loading deployment state from %s", statePath) log.Infof(ctx, "Loading deployment state from %s", statePath)
f, err := os.Open(statePath) f, err := os.Open(statePath)
if err != nil { if err != nil {
if os.IsNotExist(err) { if errors.Is(err, fs.ErrNotExist) {
log.Infof(ctx, "No deployment state file found") log.Infof(ctx, "No deployment state file found")
return &DeploymentState{ return &DeploymentState{
Version: DeploymentStateVersion, Version: DeploymentStateVersion,

View File

@ -10,19 +10,23 @@ import (
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/internal/build" "github.com/databricks/cli/internal/build"
"github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/internal/testutil"
databrickscfg "github.com/databricks/databricks-sdk-go/config" "github.com/databricks/cli/libs/fileset"
"github.com/databricks/databricks-sdk-go/experimental/mocks" "github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/iam" "github.com/databricks/databricks-sdk-go/service/iam"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestStateUpdate(t *testing.T) { func setupBundleForStateUpdate(t *testing.T) *bundle.Bundle {
s := &stateUpdate{} tmpDir := t.TempDir()
b := &bundle.Bundle{ testutil.Touch(t, tmpDir, "test1.py")
RootPath: t.TempDir(), testutil.TouchNotebook(t, tmpDir, "test2.py")
files, err := fileset.New(vfs.MustNew(tmpDir)).All()
require.NoError(t, err)
return &bundle.Bundle{
RootPath: tmpDir,
Config: config.Root{ Config: config.Root{
Bundle: config.Bundle{ Bundle: config.Bundle{
Target: "default", Target: "default",
@ -37,22 +41,14 @@ func TestStateUpdate(t *testing.T) {
}, },
}, },
}, },
Files: files,
} }
}
testutil.Touch(t, b.RootPath, "test1.py") func TestStateUpdate(t *testing.T) {
testutil.Touch(t, b.RootPath, "test2.py") s := &stateUpdate{}
m := mocks.NewMockWorkspaceClient(t)
m.WorkspaceClient.Config = &databrickscfg.Config{
Host: "https://test.com",
}
b.SetWorkpaceClient(m.WorkspaceClient)
wsApi := m.GetMockWorkspaceAPI()
wsApi.EXPECT().GetStatusByPath(mock.Anything, "/files").Return(&workspace.ObjectInfo{
ObjectType: "DIRECTORY",
}, nil)
b := setupBundleForStateUpdate(t)
ctx := context.Background() ctx := context.Background()
diags := bundle.Apply(ctx, b, s) diags := bundle.Apply(ctx, b, s)
@ -63,7 +59,15 @@ func TestStateUpdate(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, int64(1), state.Seq) require.Equal(t, int64(1), state.Seq)
require.Len(t, state.Files, 3) require.Equal(t, state.Files, Filelist{
{
LocalPath: "test1.py",
},
{
LocalPath: "test2.py",
IsNotebook: true,
},
})
require.Equal(t, build.GetInfo().Version, state.CliVersion) require.Equal(t, build.GetInfo().Version, state.CliVersion)
diags = bundle.Apply(ctx, b, s) diags = bundle.Apply(ctx, b, s)
@ -74,45 +78,22 @@ func TestStateUpdate(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, int64(2), state.Seq) require.Equal(t, int64(2), state.Seq)
require.Len(t, state.Files, 3) require.Equal(t, state.Files, Filelist{
{
LocalPath: "test1.py",
},
{
LocalPath: "test2.py",
IsNotebook: true,
},
})
require.Equal(t, build.GetInfo().Version, state.CliVersion) require.Equal(t, build.GetInfo().Version, state.CliVersion)
} }
func TestStateUpdateWithExistingState(t *testing.T) { func TestStateUpdateWithExistingState(t *testing.T) {
s := &stateUpdate{} s := &stateUpdate{}
b := &bundle.Bundle{ b := setupBundleForStateUpdate(t)
RootPath: t.TempDir(),
Config: config.Root{
Bundle: config.Bundle{
Target: "default",
},
Workspace: config.Workspace{
StatePath: "/state",
FilePath: "/files",
CurrentUser: &config.User{
User: &iam.User{
UserName: "test-user",
},
},
},
},
}
testutil.Touch(t, b.RootPath, "test1.py")
testutil.Touch(t, b.RootPath, "test2.py")
m := mocks.NewMockWorkspaceClient(t)
m.WorkspaceClient.Config = &databrickscfg.Config{
Host: "https://test.com",
}
b.SetWorkpaceClient(m.WorkspaceClient)
wsApi := m.GetMockWorkspaceAPI()
wsApi.EXPECT().GetStatusByPath(mock.Anything, "/files").Return(&workspace.ObjectInfo{
ObjectType: "DIRECTORY",
}, nil)
ctx := context.Background() ctx := context.Background()
// Create an existing state file. // Create an existing state file.
@ -144,6 +125,14 @@ func TestStateUpdateWithExistingState(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, int64(11), state.Seq) require.Equal(t, int64(11), state.Seq)
require.Len(t, state.Files, 3) require.Equal(t, state.Files, Filelist{
{
LocalPath: "test1.py",
},
{
LocalPath: "test2.py",
IsNotebook: true,
},
})
require.Equal(t, build.GetInfo().Version, state.CliVersion) require.Equal(t, build.GetInfo().Version, state.CliVersion)
} }

View File

@ -455,6 +455,24 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
var src = resources.ModelServingEndpoint{ var src = resources.ModelServingEndpoint{
CreateServingEndpoint: &serving.CreateServingEndpoint{ CreateServingEndpoint: &serving.CreateServingEndpoint{
Name: "name", Name: "name",
// Need to specify this to satisfy the equivalence test:
// The previous method of generation includes the "create" field
// because it is required (not marked as `omitempty`).
// The previous method used [json.Marshal] from the standard library
// and as such observed the `omitempty` tag.
// The new method leverages [dyn.Value] where any field that is not
// explicitly set is not part of the value.
Config: serving.EndpointCoreConfigInput{
ServedModels: []serving.ServedModelInput{
{
ModelName: "model_name",
ModelVersion: "1",
ScaleToZeroEnabled: true,
WorkloadSize: "Small",
},
},
},
}, },
Permissions: []resources.Permission{ Permissions: []resources.Permission{
{ {

View File

@ -2,7 +2,9 @@ package terraform
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"io/fs"
"os" "os"
"os/exec" "os/exec"
"path/filepath" "path/filepath"
@ -59,7 +61,7 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con
// If the execPath already exists, return it. // If the execPath already exists, return it.
execPath := filepath.Join(binDir, product.Terraform.BinaryName()) execPath := filepath.Join(binDir, product.Terraform.BinaryName())
_, err = os.Stat(execPath) _, err = os.Stat(execPath)
if err != nil && !os.IsNotExist(err) { if err != nil && !errors.Is(err, fs.ErrNotExist) {
return "", err return "", err
} }
if err == nil { if err == nil {
@ -148,7 +150,7 @@ func getEnvVarWithMatchingVersion(ctx context.Context, envVarName string, versio
// If the path does not exist, we return early. // If the path does not exist, we return early.
_, err := os.Stat(envValue) _, err := os.Stat(envValue)
if err != nil { if err != nil {
if os.IsNotExist(err) { if errors.Is(err, fs.ErrNotExist) {
log.Debugf(ctx, "%s at %s does not exist", envVarName, envValue) log.Debugf(ctx, "%s at %s does not exist", envVarName, envValue)
return "", nil return "", nil
} else { } else {
@ -216,6 +218,23 @@ func setProxyEnvVars(ctx context.Context, environ map[string]string, b *bundle.B
return nil return nil
} }
func setUserAgentExtraEnvVar(environ map[string]string, b *bundle.Bundle) error {
var products []string
if experimental := b.Config.Experimental; experimental != nil {
if experimental.PyDABs.Enabled {
products = append(products, "databricks-pydabs/0.0.0")
}
}
userAgentExtra := strings.Join(products, " ")
if userAgentExtra != "" {
environ["DATABRICKS_USER_AGENT_EXTRA"] = userAgentExtra
}
return nil
}
func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
tfConfig := b.Config.Bundle.Terraform tfConfig := b.Config.Bundle.Terraform
if tfConfig == nil { if tfConfig == nil {
@ -260,6 +279,11 @@ func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnosti
return diag.FromErr(err) return diag.FromErr(err)
} }
err = setUserAgentExtraEnvVar(environ, b)
if err != nil {
return diag.FromErr(err)
}
// Configure environment variables for auth for Terraform to use. // Configure environment variables for auth for Terraform to use.
log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(environ), ", ")) log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(environ), ", "))
err = tf.SetEnv(environ) err = tf.SetEnv(environ)
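The new setUserAgentExtraEnvVar hook joins product tokens into a single space-separated value and only exports DATABRICKS_USER_AGENT_EXTRA when the result is non-empty. A minimal sketch of that assembly (the helper name is illustrative):

package main

import (
	"fmt"
	"strings"
)

// buildUserAgentExtra mirrors the logic above: collect product tokens and
// join them with spaces; an empty result means the variable is left unset.
func buildUserAgentExtra(pydabsEnabled bool) string {
	var products []string
	if pydabsEnabled {
		products = append(products, "databricks-pydabs/0.0.0")
	}
	return strings.Join(products, " ")
}

func main() {
	environ := map[string]string{}
	if extra := buildUserAgentExtra(true); extra != "" {
		environ["DATABRICKS_USER_AGENT_EXTRA"] = extra
	}
	fmt.Println(environ) // map[DATABRICKS_USER_AGENT_EXTRA:databricks-pydabs/0.0.0]
}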

View File

@ -248,6 +248,27 @@ func TestSetProxyEnvVars(t *testing.T) {
assert.ElementsMatch(t, []string{"HTTP_PROXY", "HTTPS_PROXY", "NO_PROXY"}, maps.Keys(env)) assert.ElementsMatch(t, []string{"HTTP_PROXY", "HTTPS_PROXY", "NO_PROXY"}, maps.Keys(env))
} }
func TestSetUserAgentExtraEnvVar(t *testing.T) {
b := &bundle.Bundle{
RootPath: t.TempDir(),
Config: config.Root{
Experimental: &config.Experimental{
PyDABs: config.PyDABs{
Enabled: true,
},
},
},
}
env := make(map[string]string, 0)
err := setUserAgentExtraEnvVar(env, b)
require.NoError(t, err)
assert.Equal(t, map[string]string{
"DATABRICKS_USER_AGENT_EXTRA": "databricks-pydabs/0.0.0",
}, env)
}
func TestInheritEnvVars(t *testing.T) { func TestInheritEnvVars(t *testing.T) {
env := map[string]string{} env := map[string]string{}

View File

@ -28,7 +28,7 @@ func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) {
p[0] = dyn.Key(newKey) p[0] = dyn.Key(newKey)
acc, err = dyn.SetByPath(acc, p, v) acc, err = dyn.SetByPath(acc, p, v)
if err != nil { if err != nil {
return dyn.NilValue, err return dyn.InvalidValue, err
} }
return dyn.InvalidValue, dyn.ErrDrop return dyn.InvalidValue, dyn.ErrDrop
} }

View File

@ -2,6 +2,8 @@ package schema
import ( import (
"context" "context"
"errors"
"io/fs"
"os" "os"
"path/filepath" "path/filepath"
@ -41,7 +43,7 @@ func Load(ctx context.Context) (*tfjson.ProviderSchema, error) {
} }
// Generate schema file if it doesn't exist. // Generate schema file if it doesn't exist.
if _, err := os.Stat(s.ProviderSchemaFile); os.IsNotExist(err) { if _, err := os.Stat(s.ProviderSchemaFile); errors.Is(err, fs.ErrNotExist) {
err = s.Generate(ctx) err = s.Generate(ctx)
if err != nil { if err != nil {
return nil, err return nil, err

View File

@ -1,3 +1,3 @@
package schema package schema
const ProviderVersion = "1.46.0" const ProviderVersion = "1.48.0"

View File

@ -28,6 +28,7 @@ type Config struct {
Profile string `json:"profile,omitempty"` Profile string `json:"profile,omitempty"`
RateLimit int `json:"rate_limit,omitempty"` RateLimit int `json:"rate_limit,omitempty"`
RetryTimeoutSeconds int `json:"retry_timeout_seconds,omitempty"` RetryTimeoutSeconds int `json:"retry_timeout_seconds,omitempty"`
ServerlessComputeId string `json:"serverless_compute_id,omitempty"`
SkipVerify bool `json:"skip_verify,omitempty"` SkipVerify bool `json:"skip_verify,omitempty"`
Token string `json:"token,omitempty"` Token string `json:"token,omitempty"`
Username string `json:"username,omitempty"` Username string `json:"username,omitempty"`

View File

@ -0,0 +1,12 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceAwsUnityCatalogAssumeRolePolicy struct {
AwsAccountId string `json:"aws_account_id"`
ExternalId string `json:"external_id"`
Id string `json:"id,omitempty"`
Json string `json:"json,omitempty"`
RoleName string `json:"role_name"`
UnityCatalogIamArn string `json:"unity_catalog_iam_arn,omitempty"`
}

View File

@ -19,6 +19,7 @@ type DataSourceExternalLocationExternalLocationInfo struct {
CreatedBy string `json:"created_by,omitempty"` CreatedBy string `json:"created_by,omitempty"`
CredentialId string `json:"credential_id,omitempty"` CredentialId string `json:"credential_id,omitempty"`
CredentialName string `json:"credential_name,omitempty"` CredentialName string `json:"credential_name,omitempty"`
IsolationMode string `json:"isolation_mode,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"` MetastoreId string `json:"metastore_id,omitempty"`
Name string `json:"name,omitempty"` Name string `json:"name,omitempty"`
Owner string `json:"owner,omitempty"` Owner string `json:"owner,omitempty"`

View File

@ -26,6 +26,7 @@ type DataSourceJobJobSettingsSettingsEmailNotifications struct {
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []string `json:"on_failure,omitempty"` OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"` OnStart []string `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []string `json:"on_success,omitempty"` OnSuccess []string `json:"on_success,omitempty"`
} }
@ -500,6 +501,7 @@ type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct {
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []string `json:"on_failure,omitempty"` OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"` OnStart []string `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []string `json:"on_success,omitempty"` OnSuccess []string `json:"on_success,omitempty"`
} }
@ -529,6 +531,7 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskEmailNotifications struc
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []string `json:"on_failure,omitempty"` OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"` OnStart []string `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []string `json:"on_success,omitempty"` OnSuccess []string `json:"on_success,omitempty"`
} }
@ -824,6 +827,10 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSt
Id string `json:"id"` Id string `json:"id"`
} }
type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded struct {
Id string `json:"id"`
}
type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess struct { type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess struct {
Id string `json:"id"` Id string `json:"id"`
} }
@ -832,6 +839,7 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotifications str
OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnFailure []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
OnStart []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"` OnStart []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"` OnSuccess []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
} }
@ -1163,6 +1171,10 @@ type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStart struct {
Id string `json:"id"` Id string `json:"id"`
} }
type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStreamingBacklogExceeded struct {
Id string `json:"id"`
}
type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess struct { type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess struct {
Id string `json:"id"` Id string `json:"id"`
} }
@ -1171,6 +1183,7 @@ type DataSourceJobJobSettingsSettingsTaskWebhookNotifications struct {
OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnFailure []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
OnStart []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStart `json:"on_start,omitempty"` OnStart []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStart `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"` OnSuccess []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
} }
@ -1236,6 +1249,10 @@ type DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart struct {
Id string `json:"id"` Id string `json:"id"`
} }
type DataSourceJobJobSettingsSettingsWebhookNotificationsOnStreamingBacklogExceeded struct {
Id string `json:"id"`
}
type DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess struct { type DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess struct {
Id string `json:"id"` Id string `json:"id"`
} }
@ -1244,6 +1261,7 @@ type DataSourceJobJobSettingsSettingsWebhookNotifications struct {
OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"` OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"` OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
} }

View File

@ -36,6 +36,7 @@ type DataSourceStorageCredentialStorageCredentialInfo struct {
CreatedAt int `json:"created_at,omitempty"` CreatedAt int `json:"created_at,omitempty"`
CreatedBy string `json:"created_by,omitempty"` CreatedBy string `json:"created_by,omitempty"`
Id string `json:"id,omitempty"` Id string `json:"id,omitempty"`
IsolationMode string `json:"isolation_mode,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"` MetastoreId string `json:"metastore_id,omitempty"`
Name string `json:"name,omitempty"` Name string `json:"name,omitempty"`
Owner string `json:"owner,omitempty"` Owner string `json:"owner,omitempty"`

View File

@ -3,105 +3,107 @@
package schema package schema
type DataSources struct { type DataSources struct {
AwsAssumeRolePolicy map[string]any `json:"databricks_aws_assume_role_policy,omitempty"` AwsAssumeRolePolicy map[string]any `json:"databricks_aws_assume_role_policy,omitempty"`
AwsBucketPolicy map[string]any `json:"databricks_aws_bucket_policy,omitempty"` AwsBucketPolicy map[string]any `json:"databricks_aws_bucket_policy,omitempty"`
AwsCrossaccountPolicy map[string]any `json:"databricks_aws_crossaccount_policy,omitempty"` AwsCrossaccountPolicy map[string]any `json:"databricks_aws_crossaccount_policy,omitempty"`
AwsUnityCatalogPolicy map[string]any `json:"databricks_aws_unity_catalog_policy,omitempty"` AwsUnityCatalogAssumeRolePolicy map[string]any `json:"databricks_aws_unity_catalog_assume_role_policy,omitempty"`
Catalog map[string]any `json:"databricks_catalog,omitempty"` AwsUnityCatalogPolicy map[string]any `json:"databricks_aws_unity_catalog_policy,omitempty"`
Catalogs map[string]any `json:"databricks_catalogs,omitempty"` Catalog map[string]any `json:"databricks_catalog,omitempty"`
Cluster map[string]any `json:"databricks_cluster,omitempty"` Catalogs map[string]any `json:"databricks_catalogs,omitempty"`
ClusterPolicy map[string]any `json:"databricks_cluster_policy,omitempty"` Cluster map[string]any `json:"databricks_cluster,omitempty"`
Clusters map[string]any `json:"databricks_clusters,omitempty"` ClusterPolicy map[string]any `json:"databricks_cluster_policy,omitempty"`
Clusters map[string]any `json:"databricks_clusters,omitempty"`
CurrentConfig map[string]any `json:"databricks_current_config,omitempty"`
CurrentMetastore map[string]any `json:"databricks_current_metastore,omitempty"`
CurrentUser map[string]any `json:"databricks_current_user,omitempty"`
DbfsFile map[string]any `json:"databricks_dbfs_file,omitempty"`
DbfsFilePaths map[string]any `json:"databricks_dbfs_file_paths,omitempty"`
Directory map[string]any `json:"databricks_directory,omitempty"`
ExternalLocation map[string]any `json:"databricks_external_location,omitempty"`
ExternalLocations map[string]any `json:"databricks_external_locations,omitempty"`
Group map[string]any `json:"databricks_group,omitempty"`
InstancePool map[string]any `json:"databricks_instance_pool,omitempty"`
InstanceProfiles map[string]any `json:"databricks_instance_profiles,omitempty"`
Job map[string]any `json:"databricks_job,omitempty"`
Jobs map[string]any `json:"databricks_jobs,omitempty"`
Metastore map[string]any `json:"databricks_metastore,omitempty"`
Metastores map[string]any `json:"databricks_metastores,omitempty"`
MlflowExperiment map[string]any `json:"databricks_mlflow_experiment,omitempty"`
MlflowModel map[string]any `json:"databricks_mlflow_model,omitempty"`
MwsCredentials map[string]any `json:"databricks_mws_credentials,omitempty"`
MwsWorkspaces map[string]any `json:"databricks_mws_workspaces,omitempty"`
NodeType map[string]any `json:"databricks_node_type,omitempty"`
Notebook map[string]any `json:"databricks_notebook,omitempty"`
NotebookPaths map[string]any `json:"databricks_notebook_paths,omitempty"`
Pipelines map[string]any `json:"databricks_pipelines,omitempty"`
Schemas map[string]any `json:"databricks_schemas,omitempty"`
ServicePrincipal map[string]any `json:"databricks_service_principal,omitempty"`
ServicePrincipals map[string]any `json:"databricks_service_principals,omitempty"`
Share map[string]any `json:"databricks_share,omitempty"`
Shares map[string]any `json:"databricks_shares,omitempty"`
SparkVersion map[string]any `json:"databricks_spark_version,omitempty"`
SqlWarehouse map[string]any `json:"databricks_sql_warehouse,omitempty"`
SqlWarehouses map[string]any `json:"databricks_sql_warehouses,omitempty"`
StorageCredential map[string]any `json:"databricks_storage_credential,omitempty"`
StorageCredentials map[string]any `json:"databricks_storage_credentials,omitempty"`
Table map[string]any `json:"databricks_table,omitempty"`
Tables map[string]any `json:"databricks_tables,omitempty"`
User map[string]any `json:"databricks_user,omitempty"`
Views map[string]any `json:"databricks_views,omitempty"`
Volumes map[string]any `json:"databricks_volumes,omitempty"`
Zones map[string]any `json:"databricks_zones,omitempty"`
}
func NewDataSources() *DataSources {
return &DataSources{
AwsAssumeRolePolicy: make(map[string]any),
AwsBucketPolicy: make(map[string]any),
AwsCrossaccountPolicy: make(map[string]any),
AwsUnityCatalogAssumeRolePolicy: make(map[string]any),
AwsUnityCatalogPolicy: make(map[string]any),
Catalog: make(map[string]any),
Catalogs: make(map[string]any),
Cluster: make(map[string]any),
ClusterPolicy: make(map[string]any),
Clusters: make(map[string]any),
CurrentConfig: make(map[string]any),
CurrentMetastore: make(map[string]any),
CurrentUser: make(map[string]any),
DbfsFile: make(map[string]any),
DbfsFilePaths: make(map[string]any),
Directory: make(map[string]any),
ExternalLocation: make(map[string]any),
ExternalLocations: make(map[string]any),
Group: make(map[string]any),
InstancePool: make(map[string]any),
InstanceProfiles: make(map[string]any),
Job: make(map[string]any),
Jobs: make(map[string]any),
Metastore: make(map[string]any),
Metastores: make(map[string]any),
MlflowExperiment: make(map[string]any),
MlflowModel: make(map[string]any),
MwsCredentials: make(map[string]any),
MwsWorkspaces: make(map[string]any),
NodeType: make(map[string]any),
Notebook: make(map[string]any),
NotebookPaths: make(map[string]any),
Pipelines: make(map[string]any),
Schemas: make(map[string]any),
ServicePrincipal: make(map[string]any),
ServicePrincipals: make(map[string]any),
Share: make(map[string]any),
Shares: make(map[string]any),
SparkVersion: make(map[string]any),
SqlWarehouse: make(map[string]any),
SqlWarehouses: make(map[string]any),
StorageCredential: make(map[string]any),
StorageCredentials: make(map[string]any),
Table: make(map[string]any),
Tables: make(map[string]any),
User: make(map[string]any),
Views: make(map[string]any),
Volumes: make(map[string]any),
Zones: make(map[string]any),
}
}
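The data source maps above are keyed by block name and hold free-form attribute maps; when the CLI generates the bundle's Terraform configuration, each non-empty map is emitted under the matching databricks_* data source type via its json tag. A minimal, self-contained sketch of that serialization (the trimmed-down struct, the "my_job" key, and the job_name attribute are invented for illustration and are not part of the CLI):

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in for a slice of the generated schema.DataSources struct above.
type dataSources struct {
	Job map[string]any `json:"databricks_job,omitempty"`
}

func main() {
	ds := dataSources{Job: make(map[string]any)}
	// Hypothetical lookup block; the attribute name is illustrative only.
	ds.Job["my_job"] = map[string]any{"job_name": "nightly-etl"}

	out, _ := json.MarshalIndent(ds, "", "  ")
	// The json tag turns the Job field into a "databricks_job" object.
	fmt.Println(string(out))
}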

View File

@ -26,6 +26,7 @@ type ResourceJobEmailNotifications struct {
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []string `json:"on_success,omitempty"`
}
@ -573,6 +574,7 @@ type ResourceJobTaskEmailNotifications struct {
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []string `json:"on_success,omitempty"`
}
@ -602,6 +604,7 @@ type ResourceJobTaskForEachTaskTaskEmailNotifications struct {
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []string `json:"on_success,omitempty"`
}
@ -943,6 +946,10 @@ type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStart struct {
Id string `json:"id"`
}
type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded struct {
Id string `json:"id"`
}
type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnSuccess struct {
Id string `json:"id"`
}
@ -951,6 +958,7 @@ type ResourceJobTaskForEachTaskTaskWebhookNotifications struct {
OnDurationWarningThresholdExceeded []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
OnStart []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
}
@ -1329,6 +1337,10 @@ type ResourceJobTaskWebhookNotificationsOnStart struct {
Id string `json:"id"`
}
type ResourceJobTaskWebhookNotificationsOnStreamingBacklogExceeded struct {
Id string `json:"id"`
}
type ResourceJobTaskWebhookNotificationsOnSuccess struct {
Id string `json:"id"`
}
@ -1337,6 +1349,7 @@ type ResourceJobTaskWebhookNotifications struct {
OnDurationWarningThresholdExceeded []ResourceJobTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []ResourceJobTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
OnStart []ResourceJobTaskWebhookNotificationsOnStart `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []ResourceJobTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []ResourceJobTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
}
@ -1378,6 +1391,11 @@ type ResourceJobTriggerFileArrival struct {
WaitAfterLastChangeSeconds int `json:"wait_after_last_change_seconds,omitempty"`
}
type ResourceJobTriggerPeriodic struct {
Interval int `json:"interval"`
Unit string `json:"unit"`
}
type ResourceJobTriggerTable struct {
Condition string `json:"condition,omitempty"`
MinTimeBetweenTriggersSeconds int `json:"min_time_between_triggers_seconds,omitempty"`
@ -1395,6 +1413,7 @@ type ResourceJobTriggerTableUpdate struct {
type ResourceJobTrigger struct {
PauseStatus string `json:"pause_status,omitempty"`
FileArrival *ResourceJobTriggerFileArrival `json:"file_arrival,omitempty"`
Periodic *ResourceJobTriggerPeriodic `json:"periodic,omitempty"`
Table *ResourceJobTriggerTable `json:"table,omitempty"`
TableUpdate *ResourceJobTriggerTableUpdate `json:"table_update,omitempty"`
}
@ -1411,6 +1430,10 @@ type ResourceJobWebhookNotificationsOnStart struct {
Id string `json:"id"`
}
type ResourceJobWebhookNotificationsOnStreamingBacklogExceeded struct {
Id string `json:"id"`
}
type ResourceJobWebhookNotificationsOnSuccess struct {
Id string `json:"id"`
}
@ -1419,6 +1442,7 @@ type ResourceJobWebhookNotifications struct {
OnDurationWarningThresholdExceeded []ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"`
OnStreamingBacklogExceeded []ResourceJobWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
}

View File

@ -43,6 +43,7 @@ type ResourceMwsWorkspaces struct {
CustomTags map[string]string `json:"custom_tags,omitempty"`
CustomerManagedKeyId string `json:"customer_managed_key_id,omitempty"`
DeploymentName string `json:"deployment_name,omitempty"`
GcpWorkspaceSa string `json:"gcp_workspace_sa,omitempty"`
Id string `json:"id,omitempty"`
IsNoPublicIpEnabled bool `json:"is_no_public_ip_enabled,omitempty"`
Location string `json:"location,omitempty"`

View File

@ -19,8 +19,9 @@ type ResourceOnlineTableSpec struct {
}
type ResourceOnlineTable struct {
Id string `json:"id,omitempty"`
Name string `json:"name"`
Status []any `json:"status,omitempty"`
TableServingUrl string `json:"table_serving_url,omitempty"`
Spec *ResourceOnlineTableSpec `json:"spec,omitempty"`
}

View File

@ -41,6 +41,7 @@ type ResourceStorageCredential struct {
Owner string `json:"owner,omitempty"`
ReadOnly bool `json:"read_only,omitempty"`
SkipValidation bool `json:"skip_validation,omitempty"`
StorageCredentialId string `json:"storage_credential_id,omitempty"`
AwsIamRole *ResourceStorageCredentialAwsIamRole `json:"aws_iam_role,omitempty"`
AzureManagedIdentity *ResourceStorageCredentialAzureManagedIdentity `json:"azure_managed_identity,omitempty"`
AzureServicePrincipal *ResourceStorageCredentialAzureServicePrincipal `json:"azure_service_principal,omitempty"`

View File

@ -3,6 +3,7 @@
package schema
type ResourceSystemSchema struct {
FullName string `json:"full_name,omitempty"`
Id string `json:"id,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
Schema string `json:"schema,omitempty"`

View File

@ -21,7 +21,7 @@ type Root struct {
const ProviderHost = "registry.terraform.io"
const ProviderSource = "databricks/databricks"
const ProviderVersion = "1.48.0"
func NewRoot() *Root {
return &Root{

View File

@ -66,6 +66,11 @@ func (m *filterCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) diag.Di
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
rv, err := dyn.Get(v, "resources")
if err != nil {
// If the resources key is not found, we can skip this mutator.
if dyn.IsNoSuchKeyError(err) {
return v, nil
}
return dyn.InvalidValue, err
}

View File

@ -4,6 +4,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
pythonmutator "github.com/databricks/cli/bundle/config/mutator/python"
"github.com/databricks/cli/bundle/deploy/metadata" "github.com/databricks/cli/bundle/deploy/metadata"
"github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/deploy/terraform"
"github.com/databricks/cli/bundle/permissions" "github.com/databricks/cli/bundle/permissions"
@ -28,8 +29,13 @@ func Initialize() bundle.Mutator {
mutator.ExpandWorkspaceRoot(),
mutator.DefineDefaultWorkspacePaths(),
mutator.SetVariables(),
// Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences,
// ResolveVariableReferencesInComplexVariables and ResolveVariableReferences.
// See the PythonMutatorPhaseInit documentation for what is expected at this point.
pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseInit),
mutator.ResolveVariableReferencesInLookup(),
mutator.ResolveResourceReferences(),
mutator.ResolveVariableReferencesInComplexVariables(),
mutator.ResolveVariableReferences(
"bundle",
"workspace",
@ -41,6 +47,10 @@ func Initialize() bundle.Mutator {
mutator.ProcessTargetMode(),
mutator.DefaultQueueing(),
mutator.ExpandPipelineGlobPaths(),
// Configure use of WSFS for reads if the CLI is running on Databricks.
mutator.ConfigureWSFS(),
mutator.TranslatePaths(),
python.WrapperWarning(),
permissions.ApplyBundlePermissions(),

View File

@ -0,0 +1,176 @@
package render
import (
"fmt"
"io"
"path/filepath"
"strings"
"text/template"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/service/iam"
"github.com/fatih/color"
)
var renderFuncMap = template.FuncMap{
"red": color.RedString,
"green": color.GreenString,
"blue": color.BlueString,
"yellow": color.YellowString,
"magenta": color.MagentaString,
"cyan": color.CyanString,
"bold": func(format string, a ...interface{}) string {
return color.New(color.Bold).Sprintf(format, a...)
},
"italic": func(format string, a ...interface{}) string {
return color.New(color.Italic).Sprintf(format, a...)
},
}
const errorTemplate = `{{ "Error" | red }}: {{ .Summary }}
{{- if .Path.String }}
{{ "at " }}{{ .Path.String | green }}
{{- end }}
{{- if .Location.File }}
{{ "in " }}{{ .Location.String | cyan }}
{{- end }}
{{- if .Detail }}
{{ .Detail }}
{{- end }}
`
const warningTemplate = `{{ "Warning" | yellow }}: {{ .Summary }}
{{- if .Path.String }}
{{ "at " }}{{ .Path.String | green }}
{{- end }}
{{- if .Location.File }}
{{ "in " }}{{ .Location.String | cyan }}
{{- end }}
{{- if .Detail }}
{{ .Detail }}
{{- end }}
`
const summaryTemplate = `{{- if .Name -}}
Name: {{ .Name | bold }}
{{- if .Target }}
Target: {{ .Target | bold }}
{{- end }}
{{- if or .User .Host .Path }}
Workspace:
{{- if .Host }}
Host: {{ .Host | bold }}
{{- end }}
{{- if .User }}
User: {{ .User | bold }}
{{- end }}
{{- if .Path }}
Path: {{ .Path | bold }}
{{- end }}
{{- end }}
{{ end -}}
{{ .Trailer }}
`
func pluralize(n int, singular, plural string) string {
if n == 1 {
return fmt.Sprintf("%d %s", n, singular)
}
return fmt.Sprintf("%d %s", n, plural)
}
func buildTrailer(diags diag.Diagnostics) string {
parts := []string{}
if errors := len(diags.Filter(diag.Error)); errors > 0 {
parts = append(parts, color.RedString(pluralize(errors, "error", "errors")))
}
if warnings := len(diags.Filter(diag.Warning)); warnings > 0 {
parts = append(parts, color.YellowString(pluralize(warnings, "warning", "warnings")))
}
if len(parts) > 0 {
return fmt.Sprintf("Found %s", strings.Join(parts, " and "))
} else {
return color.GreenString("Validation OK!")
}
}
func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
if b == nil {
return renderSummaryTemplate(out, &bundle.Bundle{}, diags)
}
var currentUser = &iam.User{}
if b.Config.Workspace.CurrentUser != nil {
if b.Config.Workspace.CurrentUser.User != nil {
currentUser = b.Config.Workspace.CurrentUser.User
}
}
t := template.Must(template.New("summary").Funcs(renderFuncMap).Parse(summaryTemplate))
err := t.Execute(out, map[string]any{
"Name": b.Config.Bundle.Name,
"Target": b.Config.Bundle.Target,
"User": currentUser.UserName,
"Path": b.Config.Workspace.RootPath,
"Host": b.Config.Workspace.Host,
"Trailer": buildTrailer(diags),
})
return err
}
func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
errorT := template.Must(template.New("error").Funcs(renderFuncMap).Parse(errorTemplate))
warningT := template.Must(template.New("warning").Funcs(renderFuncMap).Parse(warningTemplate))
// Print errors and warnings.
for _, d := range diags {
var t *template.Template
switch d.Severity {
case diag.Error:
t = errorT
case diag.Warning:
t = warningT
}
// Make file relative to bundle root
if d.Location.File != "" {
out, err := filepath.Rel(b.RootPath, d.Location.File)
// if we can't relativize the path, just use path as-is
if err == nil {
d.Location.File = out
}
}
// Render the diagnostic with the appropriate template.
err := t.Execute(out, d)
if err != nil {
return fmt.Errorf("failed to render template: %w", err)
}
}
return nil
}
// RenderTextOutput renders the diagnostics in a human-readable format.
func RenderTextOutput(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
err := renderDiagnostics(out, b, diags)
if err != nil {
return fmt.Errorf("failed to render diagnostics: %w", err)
}
err = renderSummaryTemplate(out, b, diags)
if err != nil {
return fmt.Errorf("failed to render summary: %w", err)
}
return nil
}
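A minimal sketch of how a caller could drive this renderer; the bundle value and the diagnostic are invented, and the import path assumes the package lives under bundle/render as the package name suggests:

package main

import (
	"os"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/render"
	"github.com/databricks/cli/libs/diag"
)

func main() {
	b := &bundle.Bundle{} // a loaded bundle in real usage
	diags := diag.Warningf("something looks off")

	// Prints each diagnostic block, then the summary with the trailer line.
	if err := render.RenderTextOutput(os.Stdout, b, diags); err != nil {
		panic(err)
	}
}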

View File

@ -0,0 +1,258 @@
package render
import (
"bytes"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
assert "github.com/databricks/cli/libs/dyn/dynassert"
"github.com/databricks/databricks-sdk-go/service/iam"
"github.com/stretchr/testify/require"
)
type renderTestOutputTestCase struct {
name string
bundle *bundle.Bundle
diags diag.Diagnostics
expected string
}
func TestRenderTextOutput(t *testing.T) {
loadingBundle := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Name: "test-bundle",
Target: "test-target",
},
},
}
testCases := []renderTestOutputTestCase{
{
name: "nil bundle and 1 error",
diags: diag.Diagnostics{
{
Severity: diag.Error,
Summary: "failed to load xxx",
},
},
expected: "Error: failed to load xxx\n" +
"\n" +
"Found 1 error\n",
},
{
name: "bundle during 'load' and 1 error",
bundle: loadingBundle,
diags: diag.Errorf("failed to load bundle"),
expected: "Error: failed to load bundle\n" +
"\n" +
"Name: test-bundle\n" +
"Target: test-target\n" +
"\n" +
"Found 1 error\n",
},
{
name: "bundle during 'load' and 1 warning",
bundle: loadingBundle,
diags: diag.Warningf("failed to load bundle"),
expected: "Warning: failed to load bundle\n" +
"\n" +
"Name: test-bundle\n" +
"Target: test-target\n" +
"\n" +
"Found 1 warning\n",
},
{
name: "bundle during 'load' and 2 warnings",
bundle: loadingBundle,
diags: diag.Warningf("warning (1)").Extend(diag.Warningf("warning (2)")),
expected: "Warning: warning (1)\n" +
"\n" +
"Warning: warning (2)\n" +
"\n" +
"Name: test-bundle\n" +
"Target: test-target\n" +
"\n" +
"Found 2 warnings\n",
},
{
name: "bundle during 'load' and 2 errors, 1 warning with details",
bundle: loadingBundle,
diags: diag.Diagnostics{
diag.Diagnostic{
Severity: diag.Error,
Summary: "error (1)",
Detail: "detail (1)",
Location: dyn.Location{
File: "foo.py",
Line: 1,
Column: 1,
},
},
diag.Diagnostic{
Severity: diag.Error,
Summary: "error (2)",
Detail: "detail (2)",
Location: dyn.Location{
File: "foo.py",
Line: 2,
Column: 1,
},
},
diag.Diagnostic{
Severity: diag.Warning,
Summary: "warning (3)",
Detail: "detail (3)",
Location: dyn.Location{
File: "foo.py",
Line: 3,
Column: 1,
},
},
},
expected: "Error: error (1)\n" +
" in foo.py:1:1\n" +
"\n" +
"detail (1)\n" +
"\n" +
"Error: error (2)\n" +
" in foo.py:2:1\n" +
"\n" +
"detail (2)\n" +
"\n" +
"Warning: warning (3)\n" +
" in foo.py:3:1\n" +
"\n" +
"detail (3)\n" +
"\n" +
"Name: test-bundle\n" +
"Target: test-target\n" +
"\n" +
"Found 2 errors and 1 warning\n",
},
{
name: "bundle during 'init'",
bundle: &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Name: "test-bundle",
Target: "test-target",
},
Workspace: config.Workspace{
Host: "https://localhost/",
CurrentUser: &config.User{
User: &iam.User{
UserName: "test-user",
},
},
RootPath: "/Users/test-user@databricks.com/.bundle/examples/test-target",
},
},
},
diags: nil,
expected: "Name: test-bundle\n" +
"Target: test-target\n" +
"Workspace:\n" +
" Host: https://localhost/\n" +
" User: test-user\n" +
" Path: /Users/test-user@databricks.com/.bundle/examples/test-target\n" +
"\n" +
"Validation OK!\n",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
writer := &bytes.Buffer{}
err := RenderTextOutput(writer, tc.bundle, tc.diags)
require.NoError(t, err)
assert.Equal(t, tc.expected, writer.String())
})
}
}
type renderDiagnosticsTestCase struct {
name string
diags diag.Diagnostics
expected string
}
func TestRenderDiagnostics(t *testing.T) {
bundle := &bundle.Bundle{}
testCases := []renderDiagnosticsTestCase{
{
name: "empty diagnostics",
diags: diag.Diagnostics{},
expected: "",
},
{
name: "error with short summary",
diags: diag.Diagnostics{
{
Severity: diag.Error,
Summary: "failed to load xxx",
},
},
expected: "Error: failed to load xxx\n\n",
},
{
name: "error with source location",
diags: diag.Diagnostics{
{
Severity: diag.Error,
Summary: "failed to load xxx",
Detail: "'name' is required",
Location: dyn.Location{
File: "foo.yaml",
Line: 1,
Column: 2,
},
},
},
expected: "Error: failed to load xxx\n" +
" in foo.yaml:1:2\n\n" +
"'name' is required\n\n",
},
{
name: "error with path",
diags: diag.Diagnostics{
{
Severity: diag.Error,
Detail: "'name' is required",
Summary: "failed to load xxx",
Path: dyn.MustPathFromString("resources.jobs.xxx"),
},
},
expected: "Error: failed to load xxx\n" +
" at resources.jobs.xxx\n" +
"\n" +
"'name' is required\n\n",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
writer := &bytes.Buffer{}
err := renderDiagnostics(writer, bundle, tc.diags)
require.NoError(t, err)
assert.Equal(t, tc.expected, writer.String())
})
}
}
func TestRenderSummaryTemplate_nilBundle(t *testing.T) {
writer := &bytes.Buffer{}
err := renderSummaryTemplate(writer, nil, nil)
require.NoError(t, err)
assert.Equal(t, "Validation OK!\n", writer.String())
}

View File

@ -79,6 +79,17 @@
"experimental": { "experimental": {
"description": "", "description": "",
"properties": { "properties": {
"pydabs": {
"description": "",
"properties": {
"enabled": {
"description": ""
},
"venv_path": {
"description": ""
}
}
},
"python_wheel_wrapper": { "python_wheel_wrapper": {
"description": "" "description": ""
}, },
@ -236,6 +247,12 @@
"description": "" "description": ""
} }
}, },
"on_streaming_backlog_exceeded": {
"description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
"items": {
"description": ""
}
},
"on_success": { "on_success": {
"description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
"items": { "items": {
@ -853,6 +870,12 @@
"description": "" "description": ""
} }
}, },
"on_streaming_backlog_exceeded": {
"description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
"items": {
"description": ""
}
},
"on_success": { "on_success": {
"description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
"items": { "items": {
@ -1595,6 +1618,17 @@
}
}
},
"on_streaming_backlog_exceeded": {
"description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
"items": {
"description": "",
"properties": {
"id": {
"description": ""
}
}
}
},
"on_success": { "on_success": {
"description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
"items": { "items": {
@ -1634,6 +1668,17 @@
"pause_status": { "pause_status": {
"description": "Whether this trigger is paused or not." "description": "Whether this trigger is paused or not."
}, },
"periodic": {
"description": "Periodic trigger settings.",
"properties": {
"interval": {
"description": "The interval at which the trigger should run."
},
"unit": {
"description": "The unit of time for the interval."
}
}
},
"table": { "table": {
"description": "Old table trigger settings name. Deprecated in favor of `table_update`.", "description": "Old table trigger settings name. Deprecated in favor of `table_update`.",
"properties": { "properties": {
@ -1712,6 +1757,17 @@
}
}
},
"on_streaming_backlog_exceeded": {
"description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
"items": {
"description": "",
"properties": {
"id": {
"description": ""
}
}
}
},
"on_success": { "on_success": {
"description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
"items": { "items": {
@ -1740,16 +1796,16 @@
"description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.", "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.",
"properties": { "properties": {
"catalog_name": { "catalog_name": {
"description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if it was already set." "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled."
}, },
"enabled": { "enabled": {
"description": "If inference tables are enabled or not. NOTE: If you have already disabled payload logging once, you cannot enable again." "description": "Indicates whether the inference table is enabled."
}, },
"schema_name": { "schema_name": {
"description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if it was already set." "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled."
}, },
"table_name_prefix": { "table_name_prefix": {
"description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if it was already set." "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled."
} }
} }
}, },
@ -1915,7 +1971,7 @@
"description": "ARN of the instance profile that the served model will use to access AWS resources." "description": "ARN of the instance profile that the served model will use to access AWS resources."
}, },
"model_name": { "model_name": {
"description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model, \nin the form of __catalog_name__.__schema_name__.__model_name__.\n" "description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n"
}, },
"model_version": { "model_version": {
"description": "The version of the model in Databricks Model Registry or Unity Catalog to be served." "description": "The version of the model in Databricks Model Registry or Unity Catalog to be served."
@ -2623,7 +2679,7 @@
}
},
"notebook": {
"description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n",
"properties": {
"path": {
"description": "The absolute path of the notebook."
@ -2710,6 +2766,156 @@
}
}
},
"quality_monitors": {
"description": "",
"additionalproperties": {
"description": "",
"properties": {
"assets_dir": {
"description": ""
},
"baseline_table_name": {
"description": ""
},
"custom_metrics": {
"description": "",
"items": {
"description": "",
"properties": {
"definition": {
"description": ""
},
"input_columns": {
"description": "",
"items": {
"description": ""
}
},
"name": {
"description": ""
},
"output_data_type": {
"description": ""
},
"type": {
"description": ""
}
}
}
},
"data_classification_config": {
"description": "",
"properties": {
"enabled": {
"description": ""
}
}
},
"inference_log": {
"description": "",
"properties": {
"granularities": {
"description": "",
"items": {
"description": ""
}
},
"label_col": {
"description": ""
},
"model_id_col": {
"description": ""
},
"prediction_col": {
"description": ""
},
"prediction_proba_col": {
"description": ""
},
"problem_type": {
"description": ""
},
"timestamp_col": {
"description": ""
}
}
},
"notifications": {
"description": "",
"properties": {
"on_failure": {
"description": "",
"properties": {
"email_addresses": {
"description": "",
"items": {
"description": ""
}
}
}
},
"on_new_classification_tag_detected": {
"description": "",
"properties": {
"email_addresses": {
"description": "",
"items": {
"description": ""
}
}
}
}
}
},
"output_schema_name": {
"description": ""
},
"schedule": {
"description": "",
"properties": {
"pause_status": {
"description": ""
},
"quartz_cron_expression": {
"description": ""
},
"timezone_id": {
"description": ""
}
}
},
"skip_builtin_dashboard": {
"description": ""
},
"slicing_exprs": {
"description": "",
"items": {
"description": ""
}
},
"snapshot": {
"description": ""
},
"time_series": {
"description": "",
"properties": {
"granularities": {
"description": "",
"items": {
"description": ""
}
},
"timestamp_col": {
"description": ""
}
}
},
"warehouse_id": {
"description": ""
}
}
}
},
"registered_models": { "registered_models": {
"description": "List of Registered Models", "description": "List of Registered Models",
"additionalproperties": { "additionalproperties": {
@ -3017,6 +3223,12 @@
"description": "" "description": ""
} }
}, },
"on_streaming_backlog_exceeded": {
"description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
"items": {
"description": ""
}
},
"on_success": { "on_success": {
"description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
"items": { "items": {
@ -3634,6 +3846,12 @@
"description": "" "description": ""
} }
}, },
"on_streaming_backlog_exceeded": {
"description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
"items": {
"description": ""
}
},
"on_success": { "on_success": {
"description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
"items": { "items": {
@ -4376,6 +4594,17 @@
}
}
},
"on_streaming_backlog_exceeded": {
"description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
"items": {
"description": "",
"properties": {
"id": {
"description": ""
}
}
}
},
"on_success": { "on_success": {
"description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
"items": { "items": {
@ -4415,6 +4644,17 @@
"pause_status": { "pause_status": {
"description": "Whether this trigger is paused or not." "description": "Whether this trigger is paused or not."
}, },
"periodic": {
"description": "Periodic trigger settings.",
"properties": {
"interval": {
"description": "The interval at which the trigger should run."
},
"unit": {
"description": "The unit of time for the interval."
}
}
},
"table": { "table": {
"description": "Old table trigger settings name. Deprecated in favor of `table_update`.", "description": "Old table trigger settings name. Deprecated in favor of `table_update`.",
"properties": { "properties": {
@ -4493,6 +4733,17 @@
}
}
},
"on_streaming_backlog_exceeded": {
"description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
"items": {
"description": "",
"properties": {
"id": {
"description": ""
}
}
}
},
"on_success": { "on_success": {
"description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
"items": { "items": {
@ -4521,16 +4772,16 @@
"description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.", "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.",
"properties": { "properties": {
"catalog_name": { "catalog_name": {
"description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if it was already set." "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled."
}, },
"enabled": { "enabled": {
"description": "If inference tables are enabled or not. NOTE: If you have already disabled payload logging once, you cannot enable again." "description": "Indicates whether the inference table is enabled."
}, },
"schema_name": { "schema_name": {
"description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if it was already set." "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled."
}, },
"table_name_prefix": { "table_name_prefix": {
"description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if it was already set." "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled."
} }
} }
}, },
@ -4696,7 +4947,7 @@
"description": "ARN of the instance profile that the served model will use to access AWS resources." "description": "ARN of the instance profile that the served model will use to access AWS resources."
}, },
"model_name": { "model_name": {
"description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model, \nin the form of __catalog_name__.__schema_name__.__model_name__.\n" "description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n"
}, },
"model_version": { "model_version": {
"description": "The version of the model in Databricks Model Registry or Unity Catalog to be served." "description": "The version of the model in Databricks Model Registry or Unity Catalog to be served."
@ -5404,7 +5655,7 @@
}
},
"notebook": {
"description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n",
"properties": {
"path": {
"description": "The absolute path of the notebook."
@ -5491,6 +5742,156 @@
}
}
},
"quality_monitors": {
"description": "",
"additionalproperties": {
"description": "",
"properties": {
"assets_dir": {
"description": ""
},
"baseline_table_name": {
"description": ""
},
"custom_metrics": {
"description": "",
"items": {
"description": "",
"properties": {
"definition": {
"description": ""
},
"input_columns": {
"description": "",
"items": {
"description": ""
}
},
"name": {
"description": ""
},
"output_data_type": {
"description": ""
},
"type": {
"description": ""
}
}
}
},
"data_classification_config": {
"description": "",
"properties": {
"enabled": {
"description": ""
}
}
},
"inference_log": {
"description": "",
"properties": {
"granularities": {
"description": "",
"items": {
"description": ""
}
},
"label_col": {
"description": ""
},
"model_id_col": {
"description": ""
},
"prediction_col": {
"description": ""
},
"prediction_proba_col": {
"description": ""
},
"problem_type": {
"description": ""
},
"timestamp_col": {
"description": ""
}
}
},
"notifications": {
"description": "",
"properties": {
"on_failure": {
"description": "",
"properties": {
"email_addresses": {
"description": "",
"items": {
"description": ""
}
}
}
},
"on_new_classification_tag_detected": {
"description": "",
"properties": {
"email_addresses": {
"description": "",
"items": {
"description": ""
}
}
}
}
}
},
"output_schema_name": {
"description": ""
},
"schedule": {
"description": "",
"properties": {
"pause_status": {
"description": ""
},
"quartz_cron_expression": {
"description": ""
},
"timezone_id": {
"description": ""
}
}
},
"skip_builtin_dashboard": {
"description": ""
},
"slicing_exprs": {
"description": "",
"items": {
"description": ""
}
},
"snapshot": {
"description": ""
},
"time_series": {
"description": "",
"properties": {
"granularities": {
"description": "",
"items": {
"description": ""
}
},
"timestamp_col": {
"description": ""
}
}
},
"warehouse_id": {
"description": ""
}
}
}
},
"registered_models": { "registered_models": {
"description": "List of Registered Models", "description": "List of Registered Models",
"additionalproperties": { "additionalproperties": {

View File

@ -20,7 +20,7 @@ func TestIntSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}`
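The only substantive change in these fixtures is the interpolation pattern, which now also accepts numeric index suffixes such as [0] on reference path segments. A quick standalone check of what the updated pattern matches (the sample strings are invented):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Updated pattern from the fixtures above: dotted reference paths may now
	// carry [N] index suffixes, e.g. ${var.my_list[0]}.
	re := regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`)

	for _, s := range []string{
		"${bundle.name}",
		"${var.my_list[0]}",
		"${resources.jobs.my_job.tasks[1].task_key}",
	} {
		fmt.Printf("%-45s %v\n", s, re.MatchString(s))
	}
}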
@ -47,7 +47,7 @@ func TestBooleanSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}`
@ -123,7 +123,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -134,7 +134,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -145,7 +145,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -156,7 +156,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -167,7 +167,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -178,7 +178,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -189,7 +189,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -200,7 +200,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -214,7 +214,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -225,7 +225,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -236,7 +236,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -247,7 +247,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -258,7 +258,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
@ -326,7 +326,7 @@ func TestStructOfStructsSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -391,7 +391,7 @@ func TestStructOfMapsSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
@ -481,7 +481,7 @@ func TestMapOfPrimitivesSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
@ -518,7 +518,7 @@ func TestMapOfStructSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
@ -556,7 +556,7 @@ func TestMapOfMapSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
@ -661,7 +661,7 @@ func TestSliceOfMapSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
@ -699,7 +699,7 @@ func TestSliceOfStructSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
@ -757,7 +757,7 @@ func TestEmbeddedStructSchema(t *testing.T) {
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
@ -797,7 +797,7 @@ func TestEmbeddedStructSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -892,7 +892,7 @@ func TestNonAnnotatedFieldsAreSkipped(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
} }
@ -934,7 +934,7 @@ func TestDashFieldsAreSkipped(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
} }
@ -987,7 +987,7 @@ func TestPointerInStructSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
} }
@ -1004,7 +1004,7 @@ func TestPointerInStructSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1018,7 +1018,7 @@ func TestPointerInStructSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
} }
@ -1035,7 +1035,7 @@ func TestPointerInStructSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1106,7 +1106,7 @@ func TestGenericSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1129,7 +1129,7 @@ func TestGenericSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1157,7 +1157,7 @@ func TestGenericSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1180,7 +1180,7 @@ func TestGenericSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1210,7 +1210,7 @@ func TestGenericSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1236,7 +1236,7 @@ func TestGenericSchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1322,7 +1322,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1333,7 +1333,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1347,7 +1347,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1429,7 +1429,7 @@ func TestDocIngestionForObject(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
} }
@ -1512,7 +1512,7 @@ func TestDocIngestionForSlice(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1524,7 +1524,7 @@ func TestDocIngestionForSlice(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
} }
@ -1611,7 +1611,7 @@ func TestDocIngestionForMap(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1623,7 +1623,7 @@ func TestDocIngestionForMap(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
} }
@ -1683,7 +1683,7 @@ func TestDocIngestionForTopLevelPrimitive(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
} }
@ -1761,7 +1761,7 @@ func TestInterfaceGeneratesEmptySchema(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1810,7 +1810,7 @@ func TestBundleReadOnlytag(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
@ -1870,7 +1870,7 @@ func TestBundleInternalTag(t *testing.T) {
}, },
{ {
"type": "string", "type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
} }
] ]
}, },
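The schema change above replaces every occurrence of the variable-reference pattern so that dotted paths may carry optional "[N]" index segments, e.g. ${var.libraries[0].jar}. A minimal Go sketch of what the new pattern accepts (the regular expression is copied verbatim from the schema; the sample strings come from the complex-variables fixture added later in this diff):

package main

import (
	"fmt"
	"regexp"
)

// The (\[[0-9]+\])* groups are the new part: they allow an index such as
// libraries[0] after any path segment, and after the final segment.
var ref = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`)

func main() {
	for _, s := range []string{
		"${var.cluster.spark_version}", // matched both before and after the change
		"${var.libraries[0].jar}",      // only matched with the new index groups
		"${var.libraries[0]}",          // a trailing index is also allowed
	} {
		fmt.Println(s, ref.MatchString(s))
	}
}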

View File

@ -0,0 +1,70 @@
package config_tests
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/stretchr/testify/require"
)
func TestComplexVariables(t *testing.T) {
b, diags := loadTargetWithDiags("variables/complex", "default")
require.Empty(t, diags)
diags = bundle.Apply(context.Background(), b, bundle.Seq(
mutator.SetVariables(),
mutator.ResolveVariableReferencesInComplexVariables(),
mutator.ResolveVariableReferences(
"variables",
),
))
require.NoError(t, diags.Error())
require.Equal(t, "13.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
require.Equal(t, "some-policy-id", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.PolicyId)
require.Equal(t, 2, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.random"])
require.Equal(t, 3, len(b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries))
require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{
Jar: "/path/to/jar",
})
require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{
Egg: "/path/to/egg",
})
require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{
Whl: "/path/to/whl",
})
require.Equal(t, "task with spark version 13.2.x-scala2.11 and jar /path/to/jar", b.Config.Resources.Jobs["my_job"].Tasks[0].TaskKey)
}
func TestComplexVariablesOverride(t *testing.T) {
b, diags := loadTargetWithDiags("variables/complex", "dev")
require.Empty(t, diags)
diags = bundle.Apply(context.Background(), b, bundle.Seq(
mutator.SetVariables(),
mutator.ResolveVariableReferencesInComplexVariables(),
mutator.ResolveVariableReferences(
"variables",
),
))
require.NoError(t, diags.Error())
require.Equal(t, "14.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
require.Equal(t, "Standard_DS3_v3", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
require.Equal(t, 4, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
require.Equal(t, "false", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
// Making sure the variable is overridden and not merged / extended.
// These properties are set in the default target but not in the override target,
// so they should be empty.
require.Equal(t, "", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.random"])
require.Equal(t, "", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.PolicyId)
}
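These assertions exercise the new complex (typed) variables end to end against the variables/complex fixture shown further down in this diff. Assuming the test and fixture live under bundle/tests, as the other files in this diff do, a quick way to run just this pair locally is:

	go test ./bundle/tests -run TestComplexVariables

The -run pattern is an unanchored regular expression, so it selects both TestComplexVariables and TestComplexVariablesOverride.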

View File

@ -1,41 +0,0 @@
package config_tests
import (
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
"github.com/stretchr/testify/assert"
)
func TestOverrideSyncTarget(t *testing.T) {
var b *bundle.Bundle
b = loadTarget(t, "./override_sync", "development")
assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("tests/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude)
b = loadTarget(t, "./override_sync", "staging")
assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("fixtures/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
b = loadTarget(t, "./override_sync", "prod")
assert.ElementsMatch(t, []string{filepath.FromSlash("src/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
}
func TestOverrideSyncTargetNoRootSync(t *testing.T) {
var b *bundle.Bundle
b = loadTarget(t, "./override_sync_no_root", "development")
assert.ElementsMatch(t, []string{filepath.FromSlash("tests/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude)
b = loadTarget(t, "./override_sync_no_root", "staging")
assert.ElementsMatch(t, []string{filepath.FromSlash("fixtures/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
b = loadTarget(t, "./override_sync_no_root", "prod")
assert.ElementsMatch(t, []string{}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
}

View File

@ -1,19 +1,26 @@
bundle:
name: quality_monitors
resources: resources:
quality_monitors: quality_monitors:
my_monitor: my_monitor:
table_name: "main.test.thing1" table_name: "main.test.dev"
assets_dir: "/Shared/provider-test/databricks_monitoring/main.test.thing1" assets_dir: "/Shared/provider-test/databricks_monitoring/main.test.thing1"
output_schema_name: "test" output_schema_name: "main.dev"
inference_log: inference_log:
granularities: ["1 day"] granularities: ["1 day"]
timestamp_col: "timestamp" timestamp_col: "timestamp"
prediction_col: "prediction" prediction_col: "prediction"
model_id_col: "model_id" model_id_col: "model_id"
problem_type: "PROBLEM_TYPE_REGRESSION" problem_type: "PROBLEM_TYPE_REGRESSION"
schedule:
quartz_cron_expression: "0 0 12 * * ?" # every day at noon
timezone_id: UTC
targets: targets:
development: development:
mode: development mode: development
default: true
resources: resources:
quality_monitors: quality_monitors:
my_monitor: my_monitor:
@ -24,14 +31,14 @@ targets:
quality_monitors: quality_monitors:
my_monitor: my_monitor:
table_name: "main.test.staging" table_name: "main.test.staging"
output_schema_name: "staging" output_schema_name: "main.staging"
production: production:
resources: resources:
quality_monitors: quality_monitors:
my_monitor: my_monitor:
table_name: "main.test.prod" table_name: "main.test.prod"
output_schema_name: "prod" output_schema_name: "main.prod"
inference_log: inference_log:
granularities: ["1 hour"] granularities: ["1 hour"]
timestamp_col: "timestamp_prod" timestamp_col: "timestamp_prod"

View File

@ -24,7 +24,7 @@ func TestMonitorTableNames(t *testing.T) {
p := b.Config.Resources.QualityMonitors["my_monitor"] p := b.Config.Resources.QualityMonitors["my_monitor"]
assert.Equal(t, "main.test.dev", p.TableName) assert.Equal(t, "main.test.dev", p.TableName)
assert.Equal(t, "/Shared/provider-test/databricks_monitoring/main.test.thing1", p.AssetsDir) assert.Equal(t, "/Shared/provider-test/databricks_monitoring/main.test.thing1", p.AssetsDir)
assert.Equal(t, "test", p.OutputSchemaName) assert.Equal(t, "main.dev", p.OutputSchemaName)
assertExpectedMonitor(t, p) assertExpectedMonitor(t, p)
} }
@ -36,7 +36,7 @@ func TestMonitorStaging(t *testing.T) {
p := b.Config.Resources.QualityMonitors["my_monitor"] p := b.Config.Resources.QualityMonitors["my_monitor"]
assert.Equal(t, "main.test.staging", p.TableName) assert.Equal(t, "main.test.staging", p.TableName)
assert.Equal(t, "/Shared/provider-test/databricks_monitoring/main.test.thing1", p.AssetsDir) assert.Equal(t, "/Shared/provider-test/databricks_monitoring/main.test.thing1", p.AssetsDir)
assert.Equal(t, "staging", p.OutputSchemaName) assert.Equal(t, "main.staging", p.OutputSchemaName)
assertExpectedMonitor(t, p) assertExpectedMonitor(t, p)
} }
@ -48,7 +48,7 @@ func TestMonitorProduction(t *testing.T) {
p := b.Config.Resources.QualityMonitors["my_monitor"] p := b.Config.Resources.QualityMonitors["my_monitor"]
assert.Equal(t, "main.test.prod", p.TableName) assert.Equal(t, "main.test.prod", p.TableName)
assert.Equal(t, "/Shared/provider-test/databricks_monitoring/main.test.thing1", p.AssetsDir) assert.Equal(t, "/Shared/provider-test/databricks_monitoring/main.test.thing1", p.AssetsDir)
assert.Equal(t, "prod", p.OutputSchemaName) assert.Equal(t, "main.prod", p.OutputSchemaName)
inferenceLog := p.InferenceLog inferenceLog := p.InferenceLog
assert.Equal(t, []string{"1 day", "1 hour"}, inferenceLog.Granularities) assert.Equal(t, []string{"1 day", "1 hour"}, inferenceLog.Granularities)

View File

@ -0,0 +1,5 @@
bundle:
name: "abc"
run_as:
service_principal_name: ""

View File

@ -0,0 +1,5 @@
bundle:
name: "abc"
run_as:
user_name: ""

View File

@ -0,0 +1,6 @@
bundle:
name: "abc"
run_as:
service_principal_name: ""
user_name: ""

View File

@ -198,27 +198,53 @@ func TestRunAsErrorWhenBothUserAndSpSpecified(t *testing.T) {
} }
func TestRunAsErrorNeitherUserOrSpSpecified(t *testing.T) { func TestRunAsErrorNeitherUserOrSpSpecified(t *testing.T) {
b := load(t, "./run_as/not_allowed/neither_sp_nor_user") tcases := []struct {
name string
err string
}{
{
name: "empty_run_as",
err: "run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified",
},
{
name: "empty_sp",
err: "run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified",
},
{
name: "empty_user",
err: "run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified",
},
{
name: "empty_user_and_sp",
err: "run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified",
},
}
ctx := context.Background() for _, tc := range tcases {
bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { t.Run(tc.name, func(t *testing.T) {
b.Config.Workspace.CurrentUser = &config.User{
User: &iam.User{
UserName: "my_service_principal",
},
}
return nil
})
diags := bundle.Apply(ctx, b, mutator.SetRunAs()) bundlePath := fmt.Sprintf("./run_as/not_allowed/neither_sp_nor_user/%s", tc.name)
err := diags.Error() b := load(t, bundlePath)
configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/databricks.yml") ctx := context.Background()
assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:4:8", configPath)) bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
b.Config.Workspace.CurrentUser = &config.User{
User: &iam.User{
UserName: "my_service_principal",
},
}
return nil
})
diags := bundle.Apply(ctx, b, mutator.SetRunAs())
err := diags.Error()
assert.EqualError(t, err, tc.err)
})
}
} }
func TestRunAsErrorNeitherUserOrSpSpecifiedAtTargetOverride(t *testing.T) { func TestRunAsErrorNeitherUserOrSpSpecifiedAtTargetOverride(t *testing.T) {
b := loadTarget(t, "./run_as/not_allowed/neither_sp_nor_user_override", "development") b := loadTarget(t, "./run_as/not_allowed/neither_sp_nor_user/override", "development")
ctx := context.Background() ctx := context.Background()
bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
@ -233,8 +259,7 @@ func TestRunAsErrorNeitherUserOrSpSpecifiedAtTargetOverride(t *testing.T) {
diags := bundle.Apply(ctx, b, mutator.SetRunAs()) diags := bundle.Apply(ctx, b, mutator.SetRunAs())
err := diags.Error() err := diags.Error()
configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user_override/override.yml") assert.EqualError(t, err, "run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified")
assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:4:12", configPath))
} }
func TestLegacyRunAs(t *testing.T) { func TestLegacyRunAs(t *testing.T) {

View File

@ -4,14 +4,19 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/databricks/cli/cmd/root"
assert "github.com/databricks/cli/libs/dyn/dynassert"
"github.com/databricks/cli/internal" "github.com/databricks/cli/internal"
"github.com/stretchr/testify/require"
) )
func TestSuggestTargetIfWrongPassed(t *testing.T) { func TestSuggestTargetIfWrongPassed(t *testing.T) {
t.Setenv("BUNDLE_ROOT", filepath.Join("target_overrides", "workspace")) t.Setenv("BUNDLE_ROOT", filepath.Join("target_overrides", "workspace"))
_, _, err := internal.RequireErrorRun(t, "bundle", "validate", "-e", "incorrect") stdoutBytes, _, err := internal.RequireErrorRun(t, "bundle", "validate", "-e", "incorrect")
require.ErrorContains(t, err, "Available targets:") stdout := stdoutBytes.String()
require.ErrorContains(t, err, "development")
require.ErrorContains(t, err, "staging") assert.Error(t, root.ErrAlreadyPrinted, err)
assert.Contains(t, stdout, "Available targets:")
assert.Contains(t, stdout, "development")
assert.Contains(t, stdout, "staging")
} }

View File

@ -0,0 +1,19 @@
bundle:
name: sync_nil
workspace:
host: https://acme.cloud.databricks.com/
sync:
include: ~
exclude: ~
targets:
development:
staging:
sync:
include:
- tests/*
exclude:
- dist

View File

@ -0,0 +1,17 @@
bundle:
name: sync_nil_root
workspace:
host: https://acme.cloud.databricks.com/
sync: ~
targets:
development:
staging:
sync:
include:
- tests/*
exclude:
- dist

View File

@ -1,5 +1,5 @@
bundle: bundle:
name: override_sync name: sync_override
workspace: workspace:
host: https://acme.cloud.databricks.com/ host: https://acme.cloud.databricks.com/

View File

@ -1,5 +1,5 @@
bundle: bundle:
name: override_sync name: sync_override_no_root
workspace: workspace:
host: https://acme.cloud.databricks.com/ host: https://acme.cloud.databricks.com/

View File

@ -13,7 +13,7 @@ import (
) )
func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) { func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) {
b := loadTarget(t, "./override_sync", "development") b := loadTarget(t, "./sync/override", "development")
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.ValidateSyncPatterns()) diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.ValidateSyncPatterns())
require.Len(t, diags, 3) require.Len(t, diags, 3)
@ -21,7 +21,7 @@ func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) {
require.Equal(t, diags[0].Severity, diag.Warning) require.Equal(t, diags[0].Severity, diag.Warning)
require.Equal(t, diags[0].Summary, "Pattern dist does not match any files") require.Equal(t, diags[0].Summary, "Pattern dist does not match any files")
require.Equal(t, diags[0].Location.File, filepath.Join("override_sync", "databricks.yml")) require.Equal(t, diags[0].Location.File, filepath.Join("sync", "override", "databricks.yml"))
require.Equal(t, diags[0].Location.Line, 17) require.Equal(t, diags[0].Location.Line, 17)
require.Equal(t, diags[0].Location.Column, 11) require.Equal(t, diags[0].Location.Column, 11)
require.Equal(t, diags[0].Path.String(), "sync.exclude[0]") require.Equal(t, diags[0].Path.String(), "sync.exclude[0]")

bundle/tests/sync_test.go Normal file
View File

@ -0,0 +1,65 @@
package config_tests
import (
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
"github.com/stretchr/testify/assert"
)
func TestSyncOverride(t *testing.T) {
var b *bundle.Bundle
b = loadTarget(t, "./sync/override", "development")
assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("tests/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude)
b = loadTarget(t, "./sync/override", "staging")
assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("fixtures/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
b = loadTarget(t, "./sync/override", "prod")
assert.ElementsMatch(t, []string{filepath.FromSlash("src/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
}
func TestSyncOverrideNoRootSync(t *testing.T) {
var b *bundle.Bundle
b = loadTarget(t, "./sync/override_no_root", "development")
assert.ElementsMatch(t, []string{filepath.FromSlash("tests/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude)
b = loadTarget(t, "./sync/override_no_root", "staging")
assert.ElementsMatch(t, []string{filepath.FromSlash("fixtures/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
b = loadTarget(t, "./sync/override_no_root", "prod")
assert.ElementsMatch(t, []string{}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
}
func TestSyncNil(t *testing.T) {
var b *bundle.Bundle
b = loadTarget(t, "./sync/nil", "development")
assert.Nil(t, b.Config.Sync.Include)
assert.Nil(t, b.Config.Sync.Exclude)
b = loadTarget(t, "./sync/nil", "staging")
assert.ElementsMatch(t, []string{filepath.FromSlash("tests/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude)
}
func TestSyncNilRoot(t *testing.T) {
var b *bundle.Bundle
b = loadTarget(t, "./sync/nil_root", "development")
assert.Nil(t, b.Config.Sync.Include)
assert.Nil(t, b.Config.Sync.Exclude)
b = loadTarget(t, "./sync/nil_root", "staging")
assert.ElementsMatch(t, []string{filepath.FromSlash("tests/*")}, b.Config.Sync.Include)
assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude)
}

View File

@ -0,0 +1,51 @@
bundle:
name: complex-variables
resources:
jobs:
my_job:
job_clusters:
- job_cluster_key: key
new_cluster: ${var.cluster}
tasks:
- task_key: test
job_cluster_key: key
libraries: ${variables.libraries.value}
task_key: "task with spark version ${var.cluster.spark_version} and jar ${var.libraries[0].jar}"
variables:
node_type:
default: "Standard_DS3_v2"
cluster:
type: complex
description: "A cluster definition"
default:
spark_version: "13.2.x-scala2.11"
node_type_id: ${var.node_type}
num_workers: 2
policy_id: "some-policy-id"
spark_conf:
spark.speculation: true
spark.databricks.delta.retentionDurationCheck.enabled: false
spark.random: true
libraries:
type: complex
description: "A libraries definition"
default:
- jar: "/path/to/jar"
- egg: "/path/to/egg"
- whl: "/path/to/whl"
targets:
default:
dev:
variables:
node_type: "Standard_DS3_v3"
cluster:
spark_version: "14.2.x-scala2.11"
node_type_id: ${var.node_type}
num_workers: 4
spark_conf:
spark.speculation: false
spark.databricks.delta.retentionDurationCheck.enabled: false

Some files were not shown because too many files have changed in this diff.