merge after rename

This commit is contained in:
Shreyas Goenka 2023-05-17 14:06:31 +02:00
commit 4b37b94630
No known key found for this signature in database
GPG Key ID: 92A07DF49CCB0622
267 changed files with 2591 additions and 1547 deletions

View File

@ -5,10 +5,10 @@ package account
{{$excludes := list}}
import (
"github.com/databricks/bricks/cmd/root"
"github.com/databricks/cli/cmd/root"
"github.com/spf13/cobra"
{{range .Services}}{{if .IsAccounts}}{{if not (in $excludes .KebabName) }}
{{.SnakeName}} "github.com/databricks/bricks/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}}
{{.SnakeName}} "github.com/databricks/cli/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}}
)
var accountCmd = &cobra.Command{

View File

@ -5,9 +5,9 @@ package cmd
{{$excludes := list "command-execution" "statement-execution" "dbfs" "dbsql-permissions"}}
import (
"github.com/databricks/bricks/cmd/root"
"github.com/databricks/cli/cmd/root"
{{range .Services}}{{if not .IsAccounts}}{{if not (in $excludes .KebabName) }}
{{.SnakeName}} "github.com/databricks/bricks/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}}
{{.SnakeName}} "github.com/databricks/cli/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}}
)
func init() {

View File

@ -3,9 +3,9 @@
package {{(.TrimPrefix "account").SnakeName}}
import (
"github.com/databricks/bricks/libs/cmdio"
"github.com/databricks/bricks/libs/flags"
"github.com/databricks/bricks/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/databricks-sdk-go/service/{{.Package.Name}}"
"github.com/spf13/cobra"
)

View File

@ -52,23 +52,23 @@ jobs:
- name: Upload macOS binaries
uses: actions/upload-artifact@v3
with:
name: bricks_darwin_snapshot
name: cli_darwin_snapshot
path: |
dist/bricks_darwin_*/
dist/*_darwin_*/
- name: Upload Linux binaries
uses: actions/upload-artifact@v3
with:
name: bricks_linux_snapshot
name: cli_linux_snapshot
path: |
dist/bricks_linux_*/
dist/*_linux_*/
- name: Upload Windows binaries
uses: actions/upload-artifact@v3
with:
name: bricks_windows_snapshot
name: cli_windows_snapshot
path: |
dist/bricks_windows_*/
dist/*_windows_*/
- name: Update snapshot tag
@ -91,4 +91,4 @@ jobs:
tag_name: snapshot
token: ${{ secrets.GITHUB_TOKEN }}
files: |
dist/bricks*.zip
dist/databricks_cli_*.zip

4
.gitignore vendored
View File

@ -6,7 +6,7 @@
*.dll
*.so
*.dylib
bricks
cli
# Test binary, built with `go test -c`
*.test
@ -27,4 +27,4 @@ __pycache__
.terraform
.terraform.lock.hcl
.vscode/launch.json
.vscode/launch.json

View File

@ -9,24 +9,24 @@ builds:
- -trimpath
ldflags:
- '-s -w'
- -X github.com/databricks/bricks/internal/build.buildProjectName={{ .ProjectName }}
- -X github.com/databricks/bricks/internal/build.buildVersion={{ .Version }}
- -X github.com/databricks/cli/internal/build.buildProjectName={{ .ProjectName }}
- -X github.com/databricks/cli/internal/build.buildVersion={{ .Version }}
# Git information
- -X github.com/databricks/bricks/internal/build.buildBranch={{ .Branch }}
- -X github.com/databricks/bricks/internal/build.buildTag={{ .Tag }}
- -X github.com/databricks/bricks/internal/build.buildShortCommit={{ .ShortCommit }}
- -X github.com/databricks/bricks/internal/build.buildFullCommit={{ .FullCommit }}
- -X github.com/databricks/bricks/internal/build.buildCommitTimestamp={{ .CommitTimestamp }}
- -X github.com/databricks/bricks/internal/build.buildSummary={{ .Summary }}
- -X github.com/databricks/cli/internal/build.buildBranch={{ .Branch }}
- -X github.com/databricks/cli/internal/build.buildTag={{ .Tag }}
- -X github.com/databricks/cli/internal/build.buildShortCommit={{ .ShortCommit }}
- -X github.com/databricks/cli/internal/build.buildFullCommit={{ .FullCommit }}
- -X github.com/databricks/cli/internal/build.buildCommitTimestamp={{ .CommitTimestamp }}
- -X github.com/databricks/cli/internal/build.buildSummary={{ .Summary }}
# Version information
- -X github.com/databricks/bricks/internal/build.buildMajor={{ .Major }}
- -X github.com/databricks/bricks/internal/build.buildMinor={{ .Minor }}
- -X github.com/databricks/bricks/internal/build.buildPatch={{ .Patch }}
- -X github.com/databricks/bricks/internal/build.buildPrerelease={{ .Prerelease }}
- -X github.com/databricks/bricks/internal/build.buildIsSnapshot={{ .IsSnapshot }}
- -X github.com/databricks/bricks/internal/build.buildTimestamp={{ .Timestamp }}
- -X github.com/databricks/cli/internal/build.buildMajor={{ .Major }}
- -X github.com/databricks/cli/internal/build.buildMinor={{ .Minor }}
- -X github.com/databricks/cli/internal/build.buildPatch={{ .Patch }}
- -X github.com/databricks/cli/internal/build.buildPrerelease={{ .Prerelease }}
- -X github.com/databricks/cli/internal/build.buildIsSnapshot={{ .IsSnapshot }}
- -X github.com/databricks/cli/internal/build.buildTimestamp={{ .Timestamp }}
goos:
- windows
@ -41,7 +41,7 @@ builds:
goarch: '386'
- goos: linux
goarch: '386'
binary: '{{ .ProjectName }}'
binary: databricks
archives:
- format: zip
@ -49,10 +49,10 @@ archives:
# Snapshot archives must have a stable file name such that the artifacts in the nightly
# release are automatically overwritten. If the snapshot version is included in the
# file name then additional logic to clean up older builds would be needed.
name_template: '{{ .ProjectName }}_{{ if not .IsSnapshot }}{{ .Version }}_{{ end }}{{ .Os }}_{{ .Arch }}'
name_template: 'databricks_cli_{{ if not .IsSnapshot }}{{ .Version }}_{{ end }}{{ .Os }}_{{ .Arch }}'
checksum:
name_template: '{{ .ProjectName }}_{{ .Version }}_SHA256SUMS'
name_template: 'databricks_cli_{{ .Version }}_SHA256SUMS'
algorithm: sha256
snapshot:
name_template: '{{ incpatch .Version }}-dev+{{ .ShortCommit }}'

View File

@ -5,5 +5,7 @@
},
"files.trimTrailingWhitespace": true,
"files.insertFinalNewline": true,
"files.trimFinalNewlines": true
"files.trimFinalNewlines": true,
"python.envFile": "${workspaceFolder}/.databricks/.databricks.env",
"databricks.python.envFile": "${workspaceFolder}/.env"
}

View File

@ -1,5 +1,30 @@
# Version changelog
## 0.100.0
This release bumps the minor version to 100 to disambiguate between Databricks CLI "v1" (the Python version)
and this version, Databricks CLI "v2". This release is a major rewrite of the CLI, and is not backwards compatible.
CLI:
* Rename bricks -> databricks ([#389](https://github.com/databricks/cli/pull/389)).
Bundles:
* Added ability for deferred mutator execution ([#380](https://github.com/databricks/cli/pull/380)).
* Do not truncate local state file when pulling remote changes ([#382](https://github.com/databricks/cli/pull/382)).
## 0.0.32
* Add support for variables in bundle config. Introduces 4 ways of setting variable values, which in decreasing order of priority are: ([#383](https://github.com/databricks/cli/pull/383))([#359](https://github.com/databricks/cli/pull/359)).
1. Command line flag. For example: `--var="foo=bar"`
2. Environment variable, e.g. `BUNDLE_VAR_foo=bar`
3. Default value as defined in the applicable environments block
4. Default value defined in variable definition
* Make the git details bundle config block optional ([#372](https://github.com/databricks/cli/pull/372)).
* Fix api post integration tests ([#371](https://github.com/databricks/cli/pull/371)).
* Fix table of content by removing not required top-level item ([#366](https://github.com/databricks/cli/pull/366)).
* Fix printing the tasks in job output in DAG execution order ([#377](https://github.com/databricks/cli/pull/377)).
* Improved error message when 'bricks bundle run' is executed before 'bricks bundle deploy' ([#378](https://github.com/databricks/cli/pull/378)).
## 0.0.31
* Add OpenAPI command coverage (both workspace and account level APIs).
@ -10,4 +35,4 @@
## 0.0.30
* Initial preview release of the Bricks CLI.
* Initial preview release of the Databricks CLI.

View File

@ -1,6 +1,6 @@
# Bricks CLI
# Databricks CLI
[![build](https://github.com/databricks/bricks/workflows/build/badge.svg?branch=main)](https://github.com/databricks/bricks/actions?query=workflow%3Abuild+branch%3Amain)
[![build](https://github.com/databricks/cli/workflows/build/badge.svg?branch=main)](https://github.com/databricks/cli/actions?query=workflow%3Abuild+branch%3Amain)
This project is in private preview.
@ -12,9 +12,9 @@ Documentation is available at https://docs.databricks.com/dev-tools/cli/bricks-c
This CLI is packaged as a dependency-free binary executable and may be located in any directory.
For convenient access, copy the `bricks` binary to any directory listed in `$PATH`.
For convenient access, copy the `databricks` binary to any directory listed in `$PATH`.
Confirm the binary works by executing `bricks version`.
Confirm the binary works by executing `databricks version`.
## Authentication

View File

@ -4,7 +4,7 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
)

View File

@ -4,8 +4,8 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/artifacts/notebook"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/artifacts/notebook"
)
func BuildAll() bundle.Mutator {

View File

@ -9,7 +9,7 @@ import (
"path/filepath"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
"github.com/databricks/databricks-sdk-go/service/workspace"
)

View File

@ -8,7 +8,7 @@ import (
"os"
"path"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
"github.com/databricks/databricks-sdk-go/service/workspace"
)

View File

@ -4,8 +4,8 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/artifacts/notebook"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/artifacts/notebook"
)
func UploadAll() bundle.Mutator {

View File

@ -12,11 +12,11 @@ import (
"path/filepath"
"sync"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/folders"
"github.com/databricks/bricks/libs/git"
"github.com/databricks/bricks/libs/locker"
"github.com/databricks/bricks/libs/terraform"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/folders"
"github.com/databricks/cli/libs/git"
"github.com/databricks/cli/libs/locker"
"github.com/databricks/cli/libs/terraform"
"github.com/databricks/databricks-sdk-go"
sdkconfig "github.com/databricks/databricks-sdk-go/config"
"github.com/hashicorp/terraform-exec/tfexec"

View File

@ -14,4 +14,9 @@ type Environment struct {
Artifacts map[string]*Artifact `json:"artifacts,omitempty"`
Resources *Resources `json:"resources,omitempty"`
// Override default values for defined variables
// Does not permit defining new variables or redefining existing ones
// in the scope of an environment
Variables map[string]string `json:"variables,omitempty"`
}

View File

@ -9,7 +9,8 @@ import (
"sort"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/variable"
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
)
@ -127,6 +128,13 @@ func (a *accumulator) walk(scope []string, rv reflect.Value, s setter) {
case reflect.String:
path := strings.Join(scope, Delimiter)
a.strings[path] = newStringField(path, anyGetter{rv}, s)
// register alias for variable value. `var.foo` would be the alias for
// `variables.foo.value`
if len(scope) == 3 && scope[0] == "variables" && scope[2] == "value" {
aliasPath := strings.Join([]string{variable.VariableReferencePrefix, scope[1]}, Delimiter)
a.strings[aliasPath] = a.strings[path]
}
case reflect.Struct:
a.walkStruct(scope, rv)
case reflect.Map:
@ -174,7 +182,7 @@ func (a *accumulator) Resolve(path string, seenPaths []string, fns ...LookupFunc
// fetch the string node to resolve
field, ok := a.strings[path]
if !ok {
return fmt.Errorf("could not find string field with path %s", path)
return fmt.Errorf("could not resolve reference %s", path)
}
// return early if the string field has no variables to interpolate

View File

@ -3,6 +3,8 @@ package interpolation
import (
"testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/variable"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -125,3 +127,70 @@ func TestInterpolationVariableLoopError(t *testing.T) {
err := expand(&f)
assert.ErrorContains(t, err, "cycle detected in field resolution: b -> c -> d -> b")
}
// Verifies that ${var.<name>} references are expanded from variable values,
// including transitive references (a variable whose value references another).
func TestInterpolationForVariables(t *testing.T) {
foo := "abc"
bar := "${var.foo} def"
apple := "${var.foo} ${var.bar}"
config := config.Root{
Variables: map[string]*variable.Variable{
"foo": {
Value: &foo,
},
"bar": {
Value: &bar,
},
"apple": {
Value: &apple,
},
},
Bundle: config.Bundle{
Name: "${var.apple} ${var.foo}",
},
}
err := expand(&config)
assert.NoError(t, err)
// Variables are resolved in dependency order before bundle.name is expanded.
assert.Equal(t, "abc", *(config.Variables["foo"].Value))
assert.Equal(t, "abc def", *(config.Variables["bar"].Value))
assert.Equal(t, "abc abc def", *(config.Variables["apple"].Value))
assert.Equal(t, "abc abc def abc", config.Bundle.Name)
}
// Verifies that a reference cycle between two variables is detected and
// reported with the full resolution chain.
func TestInterpolationLoopForVariables(t *testing.T) {
foo := "${var.bar}"
bar := "${var.foo}"
config := config.Root{
Variables: map[string]*variable.Variable{
"foo": {
Value: &foo,
},
"bar": {
Value: &bar,
},
},
Bundle: config.Bundle{
Name: "${var.foo}",
},
}
err := expand(&config)
assert.ErrorContains(t, err, "cycle detected in field resolution: bundle.name -> var.foo -> var.bar -> var.foo")
}
// Verifies that a reference using the wrong prefix ("vars." instead of "var.")
// fails with a resolution error instead of being silently left in place.
func TestInterpolationInvalidVariableReference(t *testing.T) {
foo := "abc"
config := config.Root{
Variables: map[string]*variable.Variable{
"foo": {
Value: &foo,
},
},
Bundle: config.Bundle{
Name: "${vars.foo}",
},
}
err := expand(&config)
assert.ErrorContains(t, err, "could not resolve reference vars.foo")
}

View File

@ -4,8 +4,8 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
)
type defineDefaultEnvironment struct {

View File

@ -4,9 +4,9 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -3,7 +3,7 @@ package mutator
import (
"context"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
"golang.org/x/exp/slices"
)

View File

@ -4,8 +4,8 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -5,7 +5,7 @@ import (
"fmt"
"path"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
type defineDefaultWorkspacePaths struct{}

View File

@ -4,9 +4,9 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -4,7 +4,7 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
type defineDefaultWorkspaceRoot struct{}

View File

@ -4,9 +4,9 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -6,7 +6,7 @@ import (
"path"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
type expandWorkspaceRoot struct{}

View File

@ -4,9 +4,9 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/databricks-sdk-go/service/iam"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

View File

@ -3,9 +3,9 @@ package mutator
import (
"context"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/git"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/git"
"github.com/databricks/cli/libs/log"
)
type loadGitDetails struct{}

View File

@ -1,7 +1,7 @@
package mutator
import (
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
func DefaultMutators() []bundle.Mutator {

View File

@ -3,7 +3,7 @@ package mutator
import (
"context"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
type populateCurrentUser struct{}

View File

@ -4,8 +4,8 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
)
type processInclude struct {

View File

@ -7,9 +7,9 @@ import (
"path/filepath"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -5,8 +5,8 @@ import (
"fmt"
"path/filepath"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"golang.org/x/exp/slices"
)

View File

@ -7,9 +7,9 @@ import (
"runtime"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -5,7 +5,7 @@ import (
"fmt"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
"golang.org/x/exp/maps"
)

View File

@ -4,9 +4,9 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
)

View File

@ -4,7 +4,7 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
type selectEnvironment struct {

View File

@ -4,9 +4,9 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -0,0 +1,63 @@
package mutator
import (
"context"
"fmt"
"os"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/variable"
)
// Prefix for process environment variables that supply bundle variable values,
// e.g. BUNDLE_VAR_foo provides a value for the variable named "foo".
const bundleVarPrefix = "BUNDLE_VAR_"
// Mutator that resolves a value for every variable defined in the bundle config.
type setVariables struct{}
// SetVariables returns a mutator that assigns values to all defined bundle variables.
func SetVariables() bundle.Mutator {
return &setVariables{}
}
// Name identifies this mutator in logs and error messages.
func (m *setVariables) Name() string {
return "SetVariables"
}
// setVariable resolves a value for a single variable, in decreasing order of
// priority: a value that is already assigned, the BUNDLE_VAR_<name> process
// environment variable, and finally the default from the variable definition.
// Returns an error if no value could be resolved.
func setVariable(v *variable.Variable, name string) error {
	// Highest priority: a previously assigned value (e.g. from the command
	// line flag). Nothing to do in that case.
	if v.HasValue() {
		return nil
	}

	// Next: look the value up in the process environment.
	envVarName := bundleVarPrefix + name
	if val, ok := os.LookupEnv(envVarName); ok {
		if err := v.Set(val); err != nil {
			return fmt.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %w`, val, name, envVarName, err)
		}
		return nil
	}

	// Next: fall back to the default from the variable definition, if any.
	if v.HasDefault() {
		if err := v.Set(*v.Default); err != nil {
			return fmt.Errorf(`failed to assign default value from config "%s" to variable %s with error: %w`, *v.Default, name, err)
		}
		return nil
	}

	// No source produced a value: the variable is required but unassigned.
	// TODO: use cmdio to request values for unassigned variables if current
	// terminal is a tty. Tracked in https://github.com/databricks/cli/issues/379
	return fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
}
// Apply resolves a value for every variable declared in the bundle
// configuration, failing if any required variable remains unassigned.
func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) ([]bundle.Mutator, error) {
	for name, v := range b.Config.Variables {
		if err := setVariable(v, name); err != nil {
			return nil, err
		}
	}
	return nil, nil
}

View File

@ -0,0 +1,116 @@
package mutator
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/variable"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Verifies that the process environment variable takes priority over the
// default value from the variable definition.
func TestSetVariableFromProcessEnvVar(t *testing.T) {
defaultVal := "default"
variable := variable.Variable{
Description: "a test variable",
Default: &defaultVal,
}
// set value for variable as an environment variable
t.Setenv("BUNDLE_VAR_foo", "process-env")
err := setVariable(&variable, "foo")
require.NoError(t, err)
assert.Equal(t, *variable.Value, "process-env")
}
// Verifies that, with no environment override, the definition's default
// value is assigned.
func TestSetVariableUsingDefaultValue(t *testing.T) {
defaultVal := "default"
variable := variable.Variable{
Description: "a test variable",
Default: &defaultVal,
}
err := setVariable(&variable, "foo")
require.NoError(t, err)
assert.Equal(t, *variable.Value, "default")
}
// Verifies that a previously assigned value is never overwritten by the
// definition default.
func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
defaultVal := "default"
val := "assigned-value"
variable := variable.Variable{
Description: "a test variable",
Default: &defaultVal,
Value: &val,
}
// since a value is already assigned to the variable, it would not be overridden
// by the default value
err := setVariable(&variable, "foo")
require.NoError(t, err)
assert.Equal(t, *variable.Value, "assigned-value")
}
// Verifies that a previously assigned value is never overwritten by the
// process environment either.
func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
defaultVal := "default"
val := "assigned-value"
variable := variable.Variable{
Description: "a test variable",
Default: &defaultVal,
Value: &val,
}
// set value for variable as an environment variable
t.Setenv("BUNDLE_VAR_foo", "process-env")
// since a value is already assigned to the variable, it would not be overridden
// by the value from environment
err := setVariable(&variable, "foo")
require.NoError(t, err)
assert.Equal(t, *variable.Value, "assigned-value")
}
// Verifies the error message for a required variable (no default) that no
// source could provide a value for.
func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) {
variable := variable.Variable{
Description: "a test variable with no default",
}
// fails because we could not resolve a value for the variable
err := setVariable(&variable, "foo")
assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable")
}
// TestSetVariablesMutator verifies the mutator end-to-end: a variable resolves
// from its definition default, from the process environment, or keeps a value
// it was already assigned.
func TestSetVariablesMutator(t *testing.T) {
	defaultValForA := "default-a"
	defaultValForB := "default-b"
	valForC := "assigned-val-c"
	bundle := &bundle.Bundle{
		Config: config.Root{
			Variables: map[string]*variable.Variable{
				"a": {
					Description: "resolved to default value",
					Default:     &defaultValForA,
				},
				"b": {
					// NOTE: fixed typo "vairables" -> "variables".
					Description: "resolved from environment variables",
					Default:     &defaultValForB,
				},
				"c": {
					Description: "has already been assigned a value",
					Value:       &valForC,
				},
			},
		},
	}

	t.Setenv("BUNDLE_VAR_b", "env-var-b")

	_, err := SetVariables().Apply(context.Background(), bundle)
	require.NoError(t, err)

	// "a" has no environment override, so its default wins.
	assert.Equal(t, "default-a", *bundle.Config.Variables["a"].Value)
	// "b" is overridden by BUNDLE_VAR_b from the environment.
	assert.Equal(t, "env-var-b", *bundle.Config.Variables["b"].Value)
	// "c" already had a value; it must not be overwritten.
	assert.Equal(t, "assigned-val-c", *bundle.Config.Variables["c"].Value)
}

View File

@ -8,8 +8,8 @@ import (
"path/filepath"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/notebook"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/notebook"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines"
)

View File

@ -6,10 +6,10 @@ import (
"path/filepath"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/stretchr/testify/assert"

View File

@ -3,7 +3,7 @@ package config
import (
"fmt"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/cli/bundle/config/resources"
)
// Resources defines Databricks resources associated with the bundle.

View File

@ -3,7 +3,7 @@ package config
import (
"testing"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/cli/bundle/config/resources"
"github.com/stretchr/testify/assert"
)

View File

@ -1,9 +1,12 @@
package config
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/databricks/cli/bundle/config/variable"
"github.com/ghodss/yaml"
"github.com/imdario/mergo"
)
@ -16,6 +19,9 @@ type Root struct {
// It is set when loading `bundle.yml`.
Path string `json:"-" bundle:"readonly"`
// Contains user defined variables
Variables map[string]*variable.Variable `json:"variables,omitempty"`
// Bundle contains details about this bundle, such as its name,
// version of the spec (TODO), default cluster, default warehouse, etc.
Bundle Bundle `json:"bundle"`
@ -79,6 +85,29 @@ func (r *Root) SetConfigFilePath(path string) {
}
}
// InitializeVariables assigns variable values passed via the command line
// flag. Each entry must be a string of the form `foo=bar`, which assigns the
// value `bar` to the variable named `foo`. Returns an error for malformed
// entries, references to undefined variables, or assignment failures.
func (r *Root) InitializeVariables(vars []string) error {
	for _, assignment := range vars {
		// Split on the first "=" only, so values may themselves contain "=".
		parsedVariable := strings.SplitN(assignment, "=", 2)
		if len(parsedVariable) != 2 {
			return fmt.Errorf("unexpected flag value for variable assignment: %s", assignment)
		}
		name := parsedVariable[0]
		val := parsedVariable[1]

		// Single map lookup instead of an existence check followed by a
		// second lookup for Set.
		v, ok := r.Variables[name]
		if !ok {
			return fmt.Errorf("variable %s has not been defined", name)
		}
		// Wrap with %w (instead of %s) so callers can unwrap the cause with
		// errors.Is/errors.As; the rendered message is unchanged.
		if err := v.Set(val); err != nil {
			return fmt.Errorf("failed to assign %s to %s: %w", val, name, err)
		}
	}
	return nil
}
func (r *Root) Load(path string) error {
raw, err := os.ReadFile(path)
if err != nil {
@ -146,5 +175,17 @@ func (r *Root) MergeEnvironment(env *Environment) error {
}
}
if env.Variables != nil {
for k, v := range env.Variables {
variable, ok := r.Variables[k]
if !ok {
return fmt.Errorf("variable %s is not defined but is assigned a value", k)
}
// we only allow overrides of the default value for a variable
defaultVal := v
variable.Default = &defaultVal
}
}
return nil
}

View File

@ -5,6 +5,7 @@ import (
"reflect"
"testing"
"github.com/databricks/cli/bundle/config/variable"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -93,3 +94,63 @@ func TestDuplicateIdOnMergeReturnsError(t *testing.T) {
err = root.Merge(other)
assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_name_in_subconfiguration/bundle.yml, pipeline at ./testdata/duplicate_resource_name_in_subconfiguration/resources.yml)")
}
// Verifies that "--var"-style assignments set values on both optional
// (defaulted) and required variables.
func TestInitializeVariables(t *testing.T) {
fooDefault := "abc"
root := &Root{
Variables: map[string]*variable.Variable{
"foo": {
Default: &fooDefault,
Description: "an optional variable since default is defined",
},
"bar": {
Description: "a required variable",
},
},
}
err := root.InitializeVariables([]string{"foo=123", "bar=456"})
assert.NoError(t, err)
assert.Equal(t, "123", *(root.Variables["foo"].Value))
assert.Equal(t, "456", *(root.Variables["bar"].Value))
}
// Verifies that only the first "=" splits name from value, so values may
// themselves contain "=".
func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) {
root := &Root{
Variables: map[string]*variable.Variable{
"foo": {
Description: "a variable called foo",
},
},
}
err := root.InitializeVariables([]string{"foo=123=567"})
assert.NoError(t, err)
assert.Equal(t, "123=567", *(root.Variables["foo"].Value))
}
// Verifies the error for an assignment that is missing the "=" separator.
func TestInitializeVariablesInvalidFormat(t *testing.T) {
root := &Root{
Variables: map[string]*variable.Variable{
"foo": {
Description: "a variable called foo",
},
},
}
err := root.InitializeVariables([]string{"foo"})
assert.ErrorContains(t, err, "unexpected flag value for variable assignment: foo")
}
// Verifies the error for assigning a value to a variable that was never defined.
func TestInitializeVariablesUndefinedVariables(t *testing.T) {
root := &Root{
Variables: map[string]*variable.Variable{
"foo": {
Description: "A required variable",
},
},
}
err := root.InitializeVariables([]string{"bar=567"})
assert.ErrorContains(t, err, "variable bar has not been defined")
}

View File

@ -0,0 +1,46 @@
package variable
import (
"fmt"
)
// VariableReferencePrefix is the prefix used to reference a variable from an
// interpolation string, e.g. "${var.foo}".
const VariableReferencePrefix = "var"

// Variable is an input variable for the bundle config.
type Variable struct {
	// Default is an optional default value; defining one makes the variable optional.
	Default *string `json:"default,omitempty"`

	// Description documents this input variable.
	Description string `json:"description,omitempty"`

	// Value holds the resolved value for the variable. Values are resolved
	// in the following priority order (from highest to lowest):
	//
	// 1. Command line flag. For example: `--var="foo=bar"`
	// 2. Environment variable. eg: BUNDLE_VAR_foo=bar
	// 3. Default value as defined in the applicable environments block
	// 4. Default value defined in variable definition
	// 5. Throw error, since if no default value is defined, then the variable
	//    is required
	Value *string `json:"value,omitempty" bundle:"readonly"`
}

// HasDefault reports whether the variable has a default value. Variables
// without a default value are by definition required.
func (v *Variable) HasDefault() bool {
	return v.Default != nil
}

// HasValue reports whether the variable has already been assigned a value.
func (v *Variable) HasValue() bool {
	return v.Value != nil
}

// Set assigns val to the variable. Assigning a value to a variable that
// already has one is an error.
func (v *Variable) Set(val string) error {
	if v.Value != nil {
		return fmt.Errorf("variable has already been assigned value: %s", *v.Value)
	}
	v.Value = &val
	return nil
}

View File

@ -4,7 +4,7 @@ import (
"os"
"path/filepath"
"github.com/databricks/bricks/libs/databrickscfg"
"github.com/databricks/cli/libs/databrickscfg"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/config"
"github.com/databricks/databricks-sdk-go/service/iam"

35
bundle/deferred.go Normal file
View File

@ -0,0 +1,35 @@
package bundle
import (
"context"
"github.com/databricks/cli/libs/errs"
)
// DeferredMutator runs a main sequence of mutators and then always runs a
// "finally" sequence, regardless of whether the main sequence failed.
type DeferredMutator struct {
	mutators []Mutator
	finally  []Mutator
}

// Name identifies this mutator in logs and error messages.
func (d *DeferredMutator) Name() string {
	return "deferred"
}

// Defer wraps mutators such that finally is applied even when one of the
// mutators returns an error (similar in spirit to Go's defer statement).
func Defer(mutators []Mutator, finally []Mutator) []Mutator {
	dm := &DeferredMutator{
		mutators: mutators,
		finally:  finally,
	}
	return []Mutator{dm}
}

// Apply runs the main mutators, then unconditionally runs the finally
// mutators, and combines any errors from both phases into one.
func (d *DeferredMutator) Apply(ctx context.Context, b *Bundle) ([]Mutator, error) {
	mainErr := Apply(ctx, b, d.mutators)
	finallyErr := Apply(ctx, b, d.finally)
	if mainErr == nil && finallyErr == nil {
		return nil, nil
	}
	return nil, errs.FromMany(mainErr, finallyErr)
}

108
bundle/deferred_test.go Normal file
View File

@ -0,0 +1,108 @@
package bundle
import (
"context"
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
// mutatorWithError is a test double that counts how often it is applied
// and always fails with the configured message.
type mutatorWithError struct {
	applyCalled int
	errorMsg    string
}

// Name implements Mutator.
func (t *mutatorWithError) Name() string {
	return "mutatorWithError"
}

// Apply increments the call counter and returns an error carrying errorMsg.
// The message is passed as a value rather than as the format string itself,
// so a literal '%' in errorMsg cannot be misinterpreted by fmt (this also
// satisfies go vet's printf check for non-constant format strings).
func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) ([]Mutator, error) {
	t.applyCalled++
	return nil, fmt.Errorf("%s", t.errorMsg)
}
// All mutators succeed: each runs exactly once and cleanup still runs.
func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	m3 := &testMutator{}
	cleanup := &testMutator{}
	b := &Bundle{}

	err := Apply(context.Background(), b, Defer([]Mutator{m1, m2, m3}, []Mutator{cleanup}))

	assert.NoError(t, err)
	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 1, m2.applyCalled)
	assert.Equal(t, 1, m3.applyCalled)
	assert.Equal(t, 1, cleanup.applyCalled)
}

// The first mutator fails: the rest are skipped but cleanup still runs.
func TestDeferredMutatorWhenFirstFails(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
	cleanup := &testMutator{}
	b := &Bundle{}

	err := Apply(context.Background(), b, Defer([]Mutator{mErr, m1, m2}, []Mutator{cleanup}))

	assert.ErrorContains(t, err, "mutator error occurred")
	assert.Equal(t, 1, mErr.applyCalled)
	assert.Equal(t, 0, m1.applyCalled)
	assert.Equal(t, 0, m2.applyCalled)
	assert.Equal(t, 1, cleanup.applyCalled)
}

// A middle mutator fails: later mutators are skipped, cleanup still runs.
func TestDeferredMutatorWhenMiddleOneFails(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
	cleanup := &testMutator{}
	b := &Bundle{}

	err := Apply(context.Background(), b, Defer([]Mutator{m1, mErr, m2}, []Mutator{cleanup}))

	assert.ErrorContains(t, err, "mutator error occurred")
	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 1, mErr.applyCalled)
	assert.Equal(t, 0, m2.applyCalled)
	assert.Equal(t, 1, cleanup.applyCalled)
}

// The last mutator fails: everything before it ran, cleanup still runs.
func TestDeferredMutatorWhenLastOneFails(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
	cleanup := &testMutator{}
	b := &Bundle{}

	err := Apply(context.Background(), b, Defer([]Mutator{m1, m2, mErr}, []Mutator{cleanup}))

	assert.ErrorContains(t, err, "mutator error occurred")
	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 1, m2.applyCalled)
	assert.Equal(t, 1, mErr.applyCalled)
	assert.Equal(t, 1, cleanup.applyCalled)
}

// Both a main mutator and the cleanup fail: both messages are surfaced.
func TestDeferredMutatorCombinesErrorMessages(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
	cleanupErr := &mutatorWithError{errorMsg: "cleanup error occurred"}
	b := &Bundle{}

	err := Apply(context.Background(), b, Defer([]Mutator{m1, m2, mErr}, []Mutator{cleanupErr}))

	assert.ErrorContains(t, err, "mutator error occurred\ncleanup error occurred")
	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 1, m2.applyCalled)
	assert.Equal(t, 1, mErr.applyCalled)
	assert.Equal(t, 1, cleanupErr.applyCalled)
}

View File

@ -4,8 +4,8 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/cmdio"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/fatih/color"
)

View File

@ -4,8 +4,8 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/sync"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/sync"
)
func getSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) {

View File

@ -4,8 +4,8 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/cmdio"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio"
)
type upload struct{}

View File

@ -3,9 +3,9 @@ package lock
import (
"context"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/locker"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/locker"
"github.com/databricks/cli/libs/log"
)
type acquire struct{}

View File

@ -3,8 +3,8 @@ package lock
import (
"context"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/log"
)
type release struct{}

View File

@ -4,8 +4,8 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/cmdio"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio"
"github.com/hashicorp/terraform-exec/tfexec"
)

View File

@ -4,9 +4,9 @@ import (
"encoding/json"
"fmt"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/bricks/bundle/internal/tf/schema"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
tfjson "github.com/hashicorp/terraform-json"
)
@ -154,14 +154,6 @@ func BundleToTerraform(config *config.Root) *schema.Root {
}
func TerraformToBundle(state *tfjson.State, config *config.Root) error {
if state.Values == nil {
return fmt.Errorf("state.Values not set")
}
if state.Values.RootModule == nil {
return fmt.Errorf("state.Values.RootModule not set")
}
for _, resource := range state.Values.RootModule.Resources {
// Limit to resources.
if resource.Mode != tfjson.ManagedResourceMode {

View File

@ -3,8 +3,8 @@ package terraform
import (
"testing"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/ml"

View File

@ -5,8 +5,8 @@ import (
"fmt"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/cmdio"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio"
"github.com/fatih/color"
"github.com/hashicorp/terraform-exec/tfexec"
tfjson "github.com/hashicorp/terraform-json"

View File

@ -1,7 +1,7 @@
package terraform
import (
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
// Dir returns the Terraform working directory for a given bundle.

View File

@ -8,9 +8,9 @@ import (
"path/filepath"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/log"
"github.com/hashicorp/go-version"
"github.com/hashicorp/hc-install/product"
"github.com/hashicorp/hc-install/releases"

View File

@ -5,8 +5,8 @@ import (
"os/exec"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/stretchr/testify/require"
)

View File

@ -4,8 +4,8 @@ import (
"fmt"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/interpolation"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/interpolation"
)
// Rewrite variable references to resources into Terraform compatible format.

View File

@ -4,8 +4,9 @@ import (
"context"
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
"github.com/hashicorp/terraform-exec/tfexec"
tfjson "github.com/hashicorp/terraform-json"
)
type load struct{}
@ -30,6 +31,11 @@ func (l *load) Apply(ctx context.Context, b *bundle.Bundle) ([]bundle.Mutator, e
return nil, err
}
err = ValidateState(state)
if err != nil {
return nil, err
}
// Merge state into configuration.
err = TerraformToBundle(state, &b.Config)
if err != nil {
@ -39,6 +45,18 @@ func (l *load) Apply(ctx context.Context, b *bundle.Bundle) ([]bundle.Mutator, e
return nil, nil
}
// ValidateState checks that the Terraform state holds a usable root module
// before the state is merged back into the bundle configuration. An absent
// Values section means nothing was ever deployed.
func ValidateState(state *tfjson.State) error {
	switch {
	case state.Values == nil:
		return fmt.Errorf("no deployment state. Did you forget to run 'databricks bundle deploy'?")
	case state.Values.RootModule == nil:
		return fmt.Errorf("malformed terraform state: RootModule not set")
	default:
		return nil
	}
}
// Load returns a mutator that reads the Terraform state, validates it, and
// merges the deployed resource state back into the bundle configuration.
func Load() bundle.Mutator {
	return &load{}
}

View File

@ -0,0 +1,41 @@
package terraform
import (
"context"
"os/exec"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/stretchr/testify/require"
)
// TestLoadWithNoState verifies that loading Terraform state for a bundle
// that was never deployed fails with a hint to run 'bundle deploy' first.
func TestLoadWithNoState(t *testing.T) {
	// Requires a terraform binary on PATH; skip otherwise.
	_, err := exec.LookPath("terraform")
	if err != nil {
		t.Skipf("cannot find terraform binary: %s", err)
	}
	// t.TempDir() gives an empty bundle root, so no local state exists.
	b := &bundle.Bundle{
		Config: config.Root{
			Path: t.TempDir(),
			Bundle: config.Bundle{
				Environment: "whatever",
				Terraform: &config.Terraform{
					ExecPath: "terraform",
				},
			},
		},
	}
	// Dummy credentials — presumably only needed so the workspace client
	// can be constructed; TODO confirm no network call happens before the
	// state load fails.
	t.Setenv("DATABRICKS_HOST", "https://x")
	t.Setenv("DATABRICKS_TOKEN", "foobar")
	b.WorkspaceClient()
	err = bundle.Apply(context.Background(), b, []bundle.Mutator{
		Initialize(),
		Load(),
	})
	require.ErrorContains(t, err, "Did you forget to run 'databricks bundle deploy'")
}

View File

@ -5,9 +5,9 @@ import (
"fmt"
"path/filepath"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/cmdio"
"github.com/databricks/bricks/libs/terraform"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/terraform"
"github.com/hashicorp/terraform-exec/tfexec"
)

View File

@ -6,9 +6,9 @@ import (
"os"
"path/filepath"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/filer"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/apierr"
)
@ -42,10 +42,20 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) ([]bundle.Mutat
}
// Expect the state file to live under dir.
local, err := os.OpenFile(filepath.Join(dir, TerraformStateFileName), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0600)
local, err := os.OpenFile(filepath.Join(dir, TerraformStateFileName), os.O_CREATE|os.O_RDWR, 0600)
if err != nil {
return nil, err
}
defer local.Close()
if !IsLocalStateStale(local, remote) {
log.Infof(ctx, "Local state is the same or newer, ignoring remote state")
return nil, nil
}
// Truncating the file before writing
local.Truncate(0)
local.Seek(0, 0)
// Write file to disk.
log.Infof(ctx, "Writing remote state file to local cache directory")

View File

@ -5,9 +5,9 @@ import (
"os"
"path/filepath"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/filer"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/log"
)
type statePush struct{}

View File

@ -0,0 +1,38 @@
package terraform
import (
"encoding/json"
"io"
)
type state struct {
Serial int `json:"serial"`
}
func IsLocalStateStale(local io.Reader, remote io.Reader) bool {
localState, err := loadState(local)
if err != nil {
return true
}
remoteState, err := loadState(remote)
if err != nil {
return false
}
return localState.Serial < remoteState.Serial
}
func loadState(input io.Reader) (*state, error) {
content, err := io.ReadAll(input)
if err != nil {
return nil, err
}
var s state
err = json.Unmarshal(content, &s)
if err != nil {
return nil, err
}
return &s, nil
}

View File

@ -0,0 +1,93 @@
package terraform
import (
"fmt"
"io"
"testing"
"testing/iotest"
"github.com/stretchr/testify/assert"
)
type mockedReader struct {
content string
}
func (r *mockedReader) Read(p []byte) (n int, err error) {
content := []byte(r.content)
n = copy(p, content)
return n, io.EOF
}
// Local serial 5 vs remote 4: local is ahead, so it is not stale.
func TestLocalStateIsNewer(t *testing.T) {
	local := &mockedReader{content: `
{
	"serial": 5
}
`}
	remote := &mockedReader{content: `
{
	"serial": 4
}
`}
	assert.False(t, IsLocalStateStale(local, remote))
}

// Local serial 5 vs remote 6: local lags behind, so it is stale.
func TestLocalStateIsOlder(t *testing.T) {
	local := &mockedReader{content: `
{
	"serial": 5
}
`}
	remote := &mockedReader{content: `
{
	"serial": 6
}
`}
	assert.True(t, IsLocalStateStale(local, remote))
}

// Identical serials: nothing newer remotely, so not stale.
func TestLocalStateIsTheSame(t *testing.T) {
	local := &mockedReader{content: `
{
	"serial": 5
}
`}
	remote := &mockedReader{content: `
{
	"serial": 5
}
`}
	assert.False(t, IsLocalStateStale(local, remote))
}

// An unreadable local state is always considered stale.
func TestLocalStateMarkStaleWhenFailsToLoad(t *testing.T) {
	local := iotest.ErrReader(fmt.Errorf("Random error"))
	remote := &mockedReader{content: `
{
	"serial": 5
}
`}
	assert.True(t, IsLocalStateStale(local, remote))
}

// An unreadable remote state never marks the local state stale.
func TestLocalStateMarkNonStaleWhenRemoteFailsToLoad(t *testing.T) {
	local := &mockedReader{content: `
{
	"serial": 5
}
`}
	remote := iotest.ErrReader(fmt.Errorf("Random error"))
	assert.False(t, IsLocalStateStale(local, remote))
}

View File

@ -6,7 +6,7 @@ import (
"os"
"path/filepath"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
type write struct{}

View File

@ -7,8 +7,8 @@ import (
"path/filepath"
"strings"
"github.com/databricks/bricks/libs/locker"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/libs/locker"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/hashicorp/terraform-exec/tfexec"
)

View File

@ -1,4 +1,4 @@
module github.com/databricks/bricks/bundle/internal/tf/codegen
module github.com/databricks/cli/bundle/internal/tf/codegen
go 1.18

View File

@ -4,8 +4,8 @@ import (
"context"
"log"
"github.com/databricks/bricks/bundle/internal/tf/codegen/generator"
"github.com/databricks/bricks/bundle/internal/tf/codegen/schema"
"github.com/databricks/cli/bundle/internal/tf/codegen/generator"
"github.com/databricks/cli/bundle/internal/tf/codegen/schema"
)
func main() {

View File

@ -3,7 +3,7 @@ package bundle
import (
"context"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/libs/log"
)
// Mutator is the interface type that mutates a bundle's configuration or internal state.

View File

@ -1,9 +1,9 @@
package phases
import (
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/artifacts"
"github.com/databricks/bricks/bundle/config/interpolation"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/artifacts"
"github.com/databricks/cli/bundle/config/interpolation"
)
// The build phase builds artifacts.

View File

@ -1,27 +1,30 @@
package phases
import (
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/artifacts"
"github.com/databricks/bricks/bundle/deploy/files"
"github.com/databricks/bricks/bundle/deploy/lock"
"github.com/databricks/bricks/bundle/deploy/terraform"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/artifacts"
"github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/cli/bundle/deploy/lock"
"github.com/databricks/cli/bundle/deploy/terraform"
)
// The deploy phase deploys artifacts and resources.
func Deploy() bundle.Mutator {
deployPhase := bundle.Defer([]bundle.Mutator{
lock.Acquire(),
files.Upload(),
artifacts.UploadAll(),
terraform.Interpolate(),
terraform.Write(),
terraform.StatePull(),
terraform.Apply(),
terraform.StatePush(),
}, []bundle.Mutator{
lock.Release(),
})
return newPhase(
"deploy",
[]bundle.Mutator{
lock.Acquire(),
files.Upload(),
artifacts.UploadAll(),
terraform.Interpolate(),
terraform.Write(),
terraform.StatePull(),
terraform.Apply(),
terraform.StatePush(),
lock.Release(),
},
deployPhase,
)
}

View File

@ -1,24 +1,27 @@
package phases
import (
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/deploy/files"
"github.com/databricks/bricks/bundle/deploy/lock"
"github.com/databricks/bricks/bundle/deploy/terraform"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/cli/bundle/deploy/lock"
"github.com/databricks/cli/bundle/deploy/terraform"
)
// The destroy phase deletes artifacts and resources.
func Destroy() bundle.Mutator {
destroyPhase := bundle.Defer([]bundle.Mutator{
lock.Acquire(),
terraform.StatePull(),
terraform.Plan(terraform.PlanGoal("destroy")),
terraform.Destroy(),
terraform.StatePush(),
files.Delete(),
}, []bundle.Mutator{
lock.Release(),
})
return newPhase(
"destroy",
[]bundle.Mutator{
lock.Acquire(),
terraform.StatePull(),
terraform.Plan(terraform.PlanGoal("destroy")),
terraform.Destroy(),
terraform.StatePush(),
lock.Release(),
files.Delete(),
},
destroyPhase,
)
}

View File

@ -1,10 +1,11 @@
package phases
import (
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/interpolation"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/bricks/bundle/deploy/terraform"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/interpolation"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/cli/bundle/deploy/terraform"
)
// The initialize phase fills in defaults and connects to the workspace.
@ -18,9 +19,11 @@ func Initialize() bundle.Mutator {
mutator.DefineDefaultWorkspaceRoot(),
mutator.ExpandWorkspaceRoot(),
mutator.DefineDefaultWorkspacePaths(),
mutator.SetVariables(),
interpolation.Interpolate(
interpolation.IncludeLookupsInPath("bundle"),
interpolation.IncludeLookupsInPath("workspace"),
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
),
mutator.TranslatePaths(),
terraform.Initialize(),

View File

@ -4,8 +4,8 @@ package phases
import (
"context"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/log"
)
// This phase type groups mutators that belong to a lifecycle phase.

View File

@ -4,8 +4,8 @@ import (
"fmt"
"os"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/folders"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/folders"
)
const envBundleRoot = "BUNDLE_ROOT"

View File

@ -5,7 +5,7 @@ import (
"path/filepath"
"testing"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/cli/bundle/config"
"github.com/stretchr/testify/require"
)

View File

@ -6,12 +6,12 @@ import (
"strconv"
"time"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/bricks/bundle/run/output"
"github.com/databricks/bricks/bundle/run/progress"
"github.com/databricks/bricks/libs/cmdio"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/run/output"
"github.com/databricks/cli/bundle/run/progress"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/retries"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/fatih/color"

View File

@ -3,7 +3,7 @@ package run
import (
"fmt"
"github.com/databricks/bricks/bundle"
"github.com/databricks/cli/bundle"
)
// RunnerLookup maps identifiers to a list of workloads that match that identifier.

View File

@ -3,9 +3,9 @@ package run
import (
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/stretchr/testify/assert"
)

View File

@ -6,12 +6,12 @@ import (
"strings"
"time"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/bricks/bundle/run/output"
"github.com/databricks/bricks/bundle/run/progress"
"github.com/databricks/bricks/libs/cmdio"
"github.com/databricks/bricks/libs/log"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/run/output"
"github.com/databricks/cli/bundle/run/progress"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/service/pipelines"
flag "github.com/spf13/pflag"
)

View File

@ -43,7 +43,7 @@ func (event *ProgressEvent) IsInplaceSupported() bool {
return false
}
// TODO: Add inplace logging to pipelines. https://github.com/databricks/bricks/issues/280
// TODO: Add inplace logging to pipelines. https://github.com/databricks/cli/issues/280
type UpdateTracker struct {
UpdateId string
PipelineId string

View File

@ -5,8 +5,8 @@ import (
"fmt"
"strings"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/run/output"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/run/output"
)
type key string

View File

@ -3,9 +3,9 @@ package run
import (
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/bricks/bundle/config/resources"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/stretchr/testify/assert"
)

View File

@ -14,14 +14,14 @@ These descriptions are rendered in the inline documentation in an IDE
### SOP: Add schema descriptions for new fields in bundle config
1. You can autogenerate empty descriptions for the new fields by running
`bricks bundle schema --only-docs > ~/bricks/bundle/schema/docs/bundle_descriptions.json`
`databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json`
2. Manually edit bundle_descriptions.json to add your descriptions
3. Build again to embed the new `bundle_descriptions.json` into the binary (`go build`)
4. Again run `bricks bundle schema --only-docs > ~/bricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `environments`
4. Again run `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `environments`
5. push to repo
### SOP: Update descriptions in resources from a newer openapi spec
1. Run `bricks bundle schema --only-docs --openapi PATH_TO_SPEC > ~/bricks/bundle/schema/docs/bundle_descriptions.json`
1. Run `databricks bundle schema --only-docs --openapi PATH_TO_SPEC > ~/databricks/bundle/schema/docs/bundle_descriptions.json`
2. push to repo

View File

@ -7,7 +7,7 @@ import (
"os"
"reflect"
"github.com/databricks/bricks/bundle/config"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/databricks-sdk-go/openapi"
)

View File

@ -160,7 +160,7 @@ func (reader *OpenapiReader) jobsDocs() (*Docs, error) {
}
jobDocs := schemaToDocs(jobSettingsSchema)
// TODO: add description for id if needed.
// Tracked in https://github.com/databricks/bricks/issues/242
// Tracked in https://github.com/databricks/cli/issues/242
jobsDocs := &Docs{
Description: "List of job definations",
AdditionalProperties: jobDocs,
@ -175,7 +175,7 @@ func (reader *OpenapiReader) pipelinesDocs() (*Docs, error) {
}
pipelineDocs := schemaToDocs(pipelineSpecSchema)
// TODO: Two fields in resources.Pipeline have the json tag id. Clarify the
// semantics and then add a description if needed. (https://github.com/databricks/bricks/issues/242)
// semantics and then add a description if needed. (https://github.com/databricks/cli/issues/242)
pipelinesDocs := &Docs{
Description: "List of pipeline definations",
AdditionalProperties: pipelineDocs,

View File

@ -9,7 +9,7 @@ import (
func TestGitConfig(t *testing.T) {
b := load(t, "./autoload_git")
assert.Equal(t, "foo", b.Config.Bundle.Git.Branch)
sshUrl := "git@github.com:databricks/bricks.git"
httpsUrl := "https://github.com/databricks/bricks"
sshUrl := "git@github.com:databricks/cli.git"
httpsUrl := "https://github.com/databricks/cli"
assert.Contains(t, []string{sshUrl, httpsUrl}, b.Config.Bundle.Git.OriginURL)
}

View File

@ -6,8 +6,8 @@ import (
"path/filepath"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -4,8 +4,8 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/interpolation"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/interpolation"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

View File

@ -4,8 +4,8 @@ import (
"context"
"testing"
"github.com/databricks/bricks/bundle"
"github.com/databricks/bricks/bundle/config/mutator"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/require"
)

View File

@ -0,0 +1,32 @@
# Fixture for variable-override tests: "a" has a default, "b" is required
# and must be supplied per environment (or via --var / BUNDLE_VAR_b).
variables:
  a:
    description: optional variable
    default: default-a
  b:
    description: required variable

bundle:
  name: test bundle

workspace:
  profile: ${var.a} ${var.b}

environments:
  env-with-single-variable-override:
    variables:
      b: dev-b
  # Leaves required variable "b" unassigned to exercise the error path.
  env-missing-a-required-variable-assignment:
    variables:
      a: staging-a
  env-with-two-variable-overrides:
    variables:
      a: prod-a
      b: prod-b
  # "c" is deliberately not declared above; exercises the
  # undefined-variable error path.
  env-using-an-undefined-variable:
    variables:
      c: prod-c
      b: prod-b

View File

@ -0,0 +1,10 @@
# Minimal variables fixture: "a" has a default, "b" is required and must
# come from the BUNDLE_VAR_b environment variable or the --var flag.
variables:
  a:
    description: optional variable
    default: abc
  b:
    description: required variable

bundle:
  name: ${var.a} ${var.b}

View File

@ -0,0 +1,94 @@
package config_tests
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/interpolation"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/variable"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestVariables(t *testing.T) {
t.Setenv("BUNDLE_VAR_b", "def")
b := load(t, "./variables/vanilla")
err := bundle.Apply(context.Background(), b, []bundle.Mutator{
mutator.SetVariables(),
interpolation.Interpolate(
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
)})
require.NoError(t, err)
assert.Equal(t, "abc def", b.Config.Bundle.Name)
}
func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) {
b := load(t, "./variables/vanilla")
err := bundle.Apply(context.Background(), b, []bundle.Mutator{
mutator.SetVariables(),
interpolation.Interpolate(
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
)})
assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable")
}
func TestVariablesEnvironmentsBlockOverride(t *testing.T) {
b := load(t, "./variables/env_overrides")
err := bundle.Apply(context.Background(), b, []bundle.Mutator{
mutator.SelectEnvironment("env-with-single-variable-override"),
mutator.SetVariables(),
interpolation.Interpolate(
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
)})
require.NoError(t, err)
assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile)
}
func TestVariablesEnvironmentsBlockOverrideForMultipleVariables(t *testing.T) {
b := load(t, "./variables/env_overrides")
err := bundle.Apply(context.Background(), b, []bundle.Mutator{
mutator.SelectEnvironment("env-with-two-variable-overrides"),
mutator.SetVariables(),
interpolation.Interpolate(
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
)})
require.NoError(t, err)
assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile)
}
func TestVariablesEnvironmentsBlockOverrideWithProcessEnvVars(t *testing.T) {
t.Setenv("BUNDLE_VAR_b", "env-var-b")
b := load(t, "./variables/env_overrides")
err := bundle.Apply(context.Background(), b, []bundle.Mutator{
mutator.SelectEnvironment("env-with-two-variable-overrides"),
mutator.SetVariables(),
interpolation.Interpolate(
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
)})
require.NoError(t, err)
assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile)
}
func TestVariablesEnvironmentsBlockOverrideWithMissingVariables(t *testing.T) {
b := load(t, "./variables/env_overrides")
err := bundle.Apply(context.Background(), b, []bundle.Mutator{
mutator.SelectEnvironment("env-missing-a-required-variable-assignment"),
mutator.SetVariables(),
interpolation.Interpolate(
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
)})
assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable")
}
func TestVariablesEnvironmentsBlockOverrideWithUndefinedVariables(t *testing.T) {
b := load(t, "./variables/env_overrides")
err := bundle.Apply(context.Background(), b, []bundle.Mutator{
mutator.SelectEnvironment("env-using-an-undefined-variable"),
mutator.SetVariables(),
interpolation.Interpolate(
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
)})
assert.ErrorContains(t, err, "variable c is not defined but is assigned a value")
}

Some files were not shown because too many files have changed in this diff Show More