Rename bricks -> databricks (#389)

## Changes

Rename all instances of "bricks" to "databricks".
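
For illustration only, a bulk rewrite along these lines produces the import-path portion of the diff below. This is a sketch, not the mechanism actually used for this PR; the new module path `github.com/databricks/cli` is taken from the diff itself, and the grep/sed invocations are assumptions.

```sh
# Sketch: rename the Go module and rewrite import paths in one pass.
# (Assumed commands; the PR does not state how the rename was performed.)
go mod edit -module github.com/databricks/cli
grep -rl 'github.com/databricks/bricks' --include='*.go' --include='*.tmpl' . \
  | xargs sed -i 's|github.com/databricks/bricks|github.com/databricks/cli|g'
gofmt -w .
```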

## Tests

* Confirmed the goreleaser build works, uses the correct new binary
name, and produces the right archives.
* Confirmed the help output is correct.
* The output of `git grep -w bricks` is minimal; the couple of occurrences that
remain will be addressed after the repository rename.
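
For reference, verification along these lines covers the bullets above. This is a sketch: goreleaser flag names vary across versions, and the `dist/` directory layout shown is an assumption.

```sh
# Build snapshot artifacts and check the binary name and archive names
# (flag names vary by goreleaser version).
goreleaser release --snapshot --clean
ls dist/

# Check the help output of the renamed binary (path is illustrative).
./dist/databricks_linux_amd64_v1/databricks --help

# List remaining whole-word occurrences of the old name.
git grep -w bricks
```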
Pieter Noordhuis authored on 2023-05-16 18:35:39 +02:00, committed by GitHub
parent 180dfc9a40
commit 98ebb78c9b
255 changed files with 1527 additions and 1525 deletions

View File

@@ -5,10 +5,10 @@ package account
 {{$excludes := list}}
 import (
-	"github.com/databricks/bricks/cmd/root"
+	"github.com/databricks/cli/cmd/root"
 	"github.com/spf13/cobra"
 	{{range .Services}}{{if .IsAccounts}}{{if not (in $excludes .KebabName) }}
-	{{.SnakeName}} "github.com/databricks/bricks/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}}
+	{{.SnakeName}} "github.com/databricks/cli/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}}
 )
 var accountCmd = &cobra.Command{

View File

@@ -5,9 +5,9 @@ package cmd
 {{$excludes := list "command-execution" "statement-execution" "dbfs" "dbsql-permissions"}}
 import (
-	"github.com/databricks/bricks/cmd/root"
+	"github.com/databricks/cli/cmd/root"
 	{{range .Services}}{{if not .IsAccounts}}{{if not (in $excludes .KebabName) }}
-	{{.SnakeName}} "github.com/databricks/bricks/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}}
+	{{.SnakeName}} "github.com/databricks/cli/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}}
 )
 func init() {

View File

@@ -3,9 +3,9 @@
 package {{(.TrimPrefix "account").SnakeName}}
 import (
-	"github.com/databricks/bricks/libs/cmdio"
-	"github.com/databricks/bricks/libs/flags"
-	"github.com/databricks/bricks/cmd/root"
+	"github.com/databricks/cli/libs/cmdio"
+	"github.com/databricks/cli/libs/flags"
+	"github.com/databricks/cli/cmd/root"
 	"github.com/databricks/databricks-sdk-go/service/{{.Package.Name}}"
 	"github.com/spf13/cobra"
 )

View File

@@ -52,23 +52,23 @@ jobs:
       - name: Upload macOS binaries
         uses: actions/upload-artifact@v3
         with:
-          name: bricks_darwin_snapshot
+          name: cli_darwin_snapshot
           path: |
-            dist/bricks_darwin_*/
+            dist/*_darwin_*/
       - name: Upload Linux binaries
         uses: actions/upload-artifact@v3
         with:
-          name: bricks_linux_snapshot
+          name: cli_linux_snapshot
           path: |
-            dist/bricks_linux_*/
+            dist/*_linux_*/
       - name: Upload Windows binaries
         uses: actions/upload-artifact@v3
         with:
-          name: bricks_windows_snapshot
+          name: cli_windows_snapshot
           path: |
-            dist/bricks_windows_*/
+            dist/*_windows_*/
       - name: Update snapshot tag
@@ -91,4 +91,4 @@ jobs:
           tag_name: snapshot
           token: ${{ secrets.GITHUB_TOKEN }}
          files: |
-            dist/bricks*.zip
+            dist/databricks_cli_*.zip

.gitignore (vendored, 4 changes)
View File

@@ -6,7 +6,7 @@
 *.dll
 *.so
 *.dylib
-bricks
+cli
 # Test binary, built with `go test -c`
 *.test
@@ -27,4 +27,4 @@ __pycache__
 .terraform
 .terraform.lock.hcl
 .vscode/launch.json

View File

@@ -9,24 +9,24 @@ builds:
       - -trimpath
     ldflags:
       - '-s -w'
-      - -X github.com/databricks/bricks/internal/build.buildProjectName={{ .ProjectName }}
-      - -X github.com/databricks/bricks/internal/build.buildVersion={{ .Version }}
+      - -X github.com/databricks/cli/internal/build.buildProjectName={{ .ProjectName }}
+      - -X github.com/databricks/cli/internal/build.buildVersion={{ .Version }}
       # Git information
-      - -X github.com/databricks/bricks/internal/build.buildBranch={{ .Branch }}
-      - -X github.com/databricks/bricks/internal/build.buildTag={{ .Tag }}
-      - -X github.com/databricks/bricks/internal/build.buildShortCommit={{ .ShortCommit }}
-      - -X github.com/databricks/bricks/internal/build.buildFullCommit={{ .FullCommit }}
-      - -X github.com/databricks/bricks/internal/build.buildCommitTimestamp={{ .CommitTimestamp }}
-      - -X github.com/databricks/bricks/internal/build.buildSummary={{ .Summary }}
+      - -X github.com/databricks/cli/internal/build.buildBranch={{ .Branch }}
+      - -X github.com/databricks/cli/internal/build.buildTag={{ .Tag }}
+      - -X github.com/databricks/cli/internal/build.buildShortCommit={{ .ShortCommit }}
+      - -X github.com/databricks/cli/internal/build.buildFullCommit={{ .FullCommit }}
+      - -X github.com/databricks/cli/internal/build.buildCommitTimestamp={{ .CommitTimestamp }}
+      - -X github.com/databricks/cli/internal/build.buildSummary={{ .Summary }}
       # Version information
-      - -X github.com/databricks/bricks/internal/build.buildMajor={{ .Major }}
-      - -X github.com/databricks/bricks/internal/build.buildMinor={{ .Minor }}
-      - -X github.com/databricks/bricks/internal/build.buildPatch={{ .Patch }}
-      - -X github.com/databricks/bricks/internal/build.buildPrerelease={{ .Prerelease }}
-      - -X github.com/databricks/bricks/internal/build.buildIsSnapshot={{ .IsSnapshot }}
-      - -X github.com/databricks/bricks/internal/build.buildTimestamp={{ .Timestamp }}
+      - -X github.com/databricks/cli/internal/build.buildMajor={{ .Major }}
+      - -X github.com/databricks/cli/internal/build.buildMinor={{ .Minor }}
+      - -X github.com/databricks/cli/internal/build.buildPatch={{ .Patch }}
+      - -X github.com/databricks/cli/internal/build.buildPrerelease={{ .Prerelease }}
+      - -X github.com/databricks/cli/internal/build.buildIsSnapshot={{ .IsSnapshot }}
+      - -X github.com/databricks/cli/internal/build.buildTimestamp={{ .Timestamp }}
     goos:
       - windows
@@ -41,7 +41,7 @@ builds:
       goarch: '386'
     - goos: linux
       goarch: '386'
-    binary: '{{ .ProjectName }}'
+    binary: databricks
 archives:
   - format: zip
@@ -49,10 +49,10 @@ archives:
     # Snapshot archives must have a stable file name such that the artifacts in the nightly
     # release are automatically overwritten. If the snapshot version is included in the
     # file name then additional logic to clean up older builds would be needed.
-    name_template: '{{ .ProjectName }}_{{ if not .IsSnapshot }}{{ .Version }}_{{ end }}{{ .Os }}_{{ .Arch }}'
+    name_template: 'databricks_cli_{{ if not .IsSnapshot }}{{ .Version }}_{{ end }}{{ .Os }}_{{ .Arch }}'
 checksum:
-  name_template: '{{ .ProjectName }}_{{ .Version }}_SHA256SUMS'
+  name_template: 'databricks_cli_{{ .Version }}_SHA256SUMS'
   algorithm: sha256
 snapshot:
   name_template: '{{ incpatch .Version }}-dev+{{ .ShortCommit }}'

View File

@@ -5,5 +5,7 @@
     },
     "files.trimTrailingWhitespace": true,
     "files.insertFinalNewline": true,
-    "files.trimFinalNewlines": true
+    "files.trimFinalNewlines": true,
+    "python.envFile": "${workspaceFolder}/.databricks/.databricks.env",
+    "databricks.python.envFile": "${workspaceFolder}/.env"
 }

View File

@@ -1,16 +1,16 @@
 # Version changelog
 ## 0.0.32
-* Add support for variables in bundle config. Introduces 4 ways of setting variable values, which in decreasing order of priority are: ([#383](https://github.com/databricks/bricks/pull/383))([#359](https://github.com/databricks/bricks/pull/359)).
+* Add support for variables in bundle config. Introduces 4 ways of setting variable values, which in decreasing order of priority are: ([#383](https://github.com/databricks/cli/pull/383))([#359](https://github.com/databricks/cli/pull/359)).
   1. Command line flag. For example: `--var="foo=bar"`
   2. Environment variable. eg: BUNDLE_VAR_foo=bar
   3. Default value as defined in the applicable environments block
   4. Default value defined in variable definition
-* Make the git details bundle config block optional ([#372](https://github.com/databricks/bricks/pull/372)).
-* Fix api post integration tests ([#371](https://github.com/databricks/bricks/pull/371)).
-* Fix table of content by removing not required top-level item ([#366](https://github.com/databricks/bricks/pull/366)).
-* Fix printing the tasks in job output in DAG execution order ([#377](https://github.com/databricks/bricks/pull/377)).
-* Improved error message when 'bricks bundle run' is executed before 'bricks bundle deploy' ([#378](https://github.com/databricks/bricks/pull/378)).
+* Make the git details bundle config block optional ([#372](https://github.com/databricks/cli/pull/372)).
+* Fix api post integration tests ([#371](https://github.com/databricks/cli/pull/371)).
+* Fix table of content by removing not required top-level item ([#366](https://github.com/databricks/cli/pull/366)).
+* Fix printing the tasks in job output in DAG execution order ([#377](https://github.com/databricks/cli/pull/377)).
+* Improved error message when 'bricks bundle run' is executed before 'bricks bundle deploy' ([#378](https://github.com/databricks/cli/pull/378)).
 ## 0.0.31
@@ -22,4 +22,4 @@
 ## 0.0.30
-* Initial preview release of the Bricks CLI.
+* Initial preview release of the Databricks CLI.

View File

@@ -1,6 +1,6 @@
-# Bricks CLI
-[![build](https://github.com/databricks/bricks/workflows/build/badge.svg?branch=main)](https://github.com/databricks/bricks/actions?query=workflow%3Abuild+branch%3Amain)
+# Databricks CLI
+[![build](https://github.com/databricks/cli/workflows/build/badge.svg?branch=main)](https://github.com/databricks/cli/actions?query=workflow%3Abuild+branch%3Amain)
 This project is in private preview.
@@ -12,9 +12,9 @@ Documentation is available at https://docs.databricks.com/dev-tools/cli/bricks-c
 This CLI is packaged as a dependency-free binary executable and may be located in any directory.
-For convenient access, copy the `bricks` binary to any directory listed in `$PATH`.
-Confirm the binary works by executing `bricks version`.
+For convenient access, copy the `databricks` binary to any directory listed in `$PATH`.
+Confirm the binary works by executing `databricks version`.
 ## Authentication

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
) )

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/artifacts/notebook" "github.com/databricks/cli/bundle/artifacts/notebook"
) )
func BuildAll() bundle.Mutator { func BuildAll() bundle.Mutator {

View File

@ -9,7 +9,7 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
) )

View File

@ -8,7 +8,7 @@ import (
"os" "os"
"path" "path"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
) )

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/artifacts/notebook" "github.com/databricks/cli/bundle/artifacts/notebook"
) )
func UploadAll() bundle.Mutator { func UploadAll() bundle.Mutator {

View File

@ -12,11 +12,11 @@ import (
"path/filepath" "path/filepath"
"sync" "sync"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/folders" "github.com/databricks/cli/folders"
"github.com/databricks/bricks/libs/git" "github.com/databricks/cli/libs/git"
"github.com/databricks/bricks/libs/locker" "github.com/databricks/cli/libs/locker"
"github.com/databricks/bricks/libs/terraform" "github.com/databricks/cli/libs/terraform"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
sdkconfig "github.com/databricks/databricks-sdk-go/config" sdkconfig "github.com/databricks/databricks-sdk-go/config"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"

View File

@ -9,8 +9,8 @@ import (
"sort" "sort"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
) )

View File

@ -3,8 +3,8 @@ package interpolation
import ( import (
"testing" "testing"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
) )
type defineDefaultEnvironment struct { type defineDefaultEnvironment struct {

View File

@ -4,9 +4,9 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -3,7 +3,7 @@ package mutator
import ( import (
"context" "context"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
) )

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -5,7 +5,7 @@ import (
"fmt" "fmt"
"path" "path"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
type defineDefaultWorkspacePaths struct{} type defineDefaultWorkspacePaths struct{}

View File

@ -4,9 +4,9 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
type defineDefaultWorkspaceRoot struct{} type defineDefaultWorkspaceRoot struct{}

View File

@ -4,9 +4,9 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -6,7 +6,7 @@ import (
"path" "path"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
type expandWorkspaceRoot struct{} type expandWorkspaceRoot struct{}

View File

@ -4,9 +4,9 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/databricks-sdk-go/service/iam" "github.com/databricks/databricks-sdk-go/service/iam"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"

View File

@ -3,9 +3,9 @@ package mutator
import ( import (
"context" "context"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/git" "github.com/databricks/cli/libs/git"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
) )
type loadGitDetails struct{} type loadGitDetails struct{}

View File

@ -1,7 +1,7 @@
package mutator package mutator
import ( import (
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
func DefaultMutators() []bundle.Mutator { func DefaultMutators() []bundle.Mutator {

View File

@ -3,7 +3,7 @@ package mutator
import ( import (
"context" "context"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
type populateCurrentUser struct{} type populateCurrentUser struct{}

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
) )
type processInclude struct { type processInclude struct {

View File

@ -7,9 +7,9 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -5,8 +5,8 @@ import (
"fmt" "fmt"
"path/filepath" "path/filepath"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
) )

View File

@ -7,9 +7,9 @@ import (
"runtime" "runtime"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -5,7 +5,7 @@ import (
"fmt" "fmt"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
) )

View File

@ -4,9 +4,9 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
type selectEnvironment struct { type selectEnvironment struct {

View File

@ -4,9 +4,9 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@@ -5,8 +5,8 @@ import (
 	"fmt"
 	"os"
-	"github.com/databricks/bricks/bundle"
-	"github.com/databricks/bricks/bundle/config/variable"
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/bundle/config/variable"
 )
 const bundleVarPrefix = "BUNDLE_VAR_"
@@ -48,7 +48,7 @@ func setVariable(v *variable.Variable, name string) error {
 	// We should have had a value to set for the variable at this point.
 	// TODO: use cmdio to request values for unassigned variables if current
-	// terminal is a tty. Tracked in https://github.com/databricks/bricks/issues/379
+	// terminal is a tty. Tracked in https://github.com/databricks/cli/issues/379
 	return fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
 }

View File

@ -4,9 +4,9 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -8,8 +8,8 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/notebook" "github.com/databricks/cli/libs/notebook"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/databricks/databricks-sdk-go/service/pipelines"
) )

View File

@ -6,10 +6,10 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"

View File

@ -3,7 +3,7 @@ package config
import ( import (
"fmt" "fmt"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
) )
// Resources defines Databricks resources associated with the bundle. // Resources defines Databricks resources associated with the bundle.

View File

@ -3,7 +3,7 @@ package config
import ( import (
"testing" "testing"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )

View File

@ -6,7 +6,7 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/databricks/bricks/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/ghodss/yaml" "github.com/ghodss/yaml"
"github.com/imdario/mergo" "github.com/imdario/mergo"
) )

View File

@ -5,7 +5,7 @@ import (
"reflect" "reflect"
"testing" "testing"
"github.com/databricks/bricks/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -4,7 +4,7 @@ import (
"os" "os"
"path/filepath" "path/filepath"
"github.com/databricks/bricks/libs/databrickscfg" "github.com/databricks/cli/libs/databrickscfg"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/config" "github.com/databricks/databricks-sdk-go/config"
"github.com/databricks/databricks-sdk-go/service/iam" "github.com/databricks/databricks-sdk-go/service/iam"

View File

@ -3,7 +3,7 @@ package bundle
import ( import (
"context" "context"
"github.com/databricks/bricks/libs/errs" "github.com/databricks/cli/libs/errs"
) )
type DeferredMutator struct { type DeferredMutator struct {

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/databricks-sdk-go/service/workspace" "github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/fatih/color" "github.com/fatih/color"
) )

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/sync" "github.com/databricks/cli/libs/sync"
) )
func getSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) { func getSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) {

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
) )
type upload struct{} type upload struct{}

View File

@ -3,9 +3,9 @@ package lock
import ( import (
"context" "context"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/locker" "github.com/databricks/cli/libs/locker"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
) )
type acquire struct{} type acquire struct{}

View File

@ -3,8 +3,8 @@ package lock
import ( import (
"context" "context"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
) )
type release struct{} type release struct{}

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
) )

View File

@ -4,9 +4,9 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/bricks/bundle/internal/tf/schema" "github.com/databricks/cli/bundle/internal/tf/schema"
tfjson "github.com/hashicorp/terraform-json" tfjson "github.com/hashicorp/terraform-json"
) )

View File

@ -3,8 +3,8 @@ package terraform
import ( import (
"testing" "testing"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/ml" "github.com/databricks/databricks-sdk-go/service/ml"

View File

@ -5,8 +5,8 @@ import (
"fmt" "fmt"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/fatih/color" "github.com/fatih/color"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
tfjson "github.com/hashicorp/terraform-json" tfjson "github.com/hashicorp/terraform-json"

View File

@ -1,7 +1,7 @@
package terraform package terraform
import ( import (
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
// Dir returns the Terraform working directory for a given bundle. // Dir returns the Terraform working directory for a given bundle.

View File

@ -8,9 +8,9 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
"github.com/hashicorp/go-version" "github.com/hashicorp/go-version"
"github.com/hashicorp/hc-install/product" "github.com/hashicorp/hc-install/product"
"github.com/hashicorp/hc-install/releases" "github.com/hashicorp/hc-install/releases"

View File

@ -5,8 +5,8 @@ import (
"os/exec" "os/exec"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -4,8 +4,8 @@ import (
"fmt" "fmt"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/interpolation"
) )
// Rewrite variable references to resources into Terraform compatible format. // Rewrite variable references to resources into Terraform compatible format.

View File

@@ -4,7 +4,7 @@ import (
 	"context"
 	"fmt"
-	"github.com/databricks/bricks/bundle"
+	"github.com/databricks/cli/bundle"
 	"github.com/hashicorp/terraform-exec/tfexec"
 	tfjson "github.com/hashicorp/terraform-json"
 )
@@ -47,7 +47,7 @@ func (l *load) Apply(ctx context.Context, b *bundle.Bundle) ([]bundle.Mutator, e
 func ValidateState(state *tfjson.State) error {
 	if state.Values == nil {
-		return fmt.Errorf("no deployment state. Did you forget to run 'bricks bundle deploy'?")
+		return fmt.Errorf("no deployment state. Did you forget to run 'databricks bundle deploy'?")
 	}
 	if state.Values.RootModule == nil {

View File

@@ -5,8 +5,8 @@ import (
 	"os/exec"
 	"testing"
-	"github.com/databricks/bricks/bundle"
-	"github.com/databricks/bricks/bundle/config"
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/bundle/config"
 	"github.com/stretchr/testify/require"
 )
@@ -37,5 +37,5 @@ func TestLoadWithNoState(t *testing.T) {
 		Load(),
 	})
-	require.ErrorContains(t, err, "Did you forget to run 'bricks bundle deploy'")
+	require.ErrorContains(t, err, "Did you forget to run 'databricks bundle deploy'")
 }

View File

@ -5,9 +5,9 @@ import (
"fmt" "fmt"
"path/filepath" "path/filepath"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/bricks/libs/terraform" "github.com/databricks/cli/libs/terraform"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
) )

View File

@ -6,9 +6,9 @@ import (
"os" "os"
"path/filepath" "path/filepath"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/apierr"
) )

View File

@ -5,9 +5,9 @@ import (
"os" "os"
"path/filepath" "path/filepath"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/filer" "github.com/databricks/cli/libs/filer"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
) )
type statePush struct{} type statePush struct{}

View File

@ -6,7 +6,7 @@ import (
"os" "os"
"path/filepath" "path/filepath"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
type write struct{} type write struct{}

View File

@ -7,8 +7,8 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/databricks/bricks/libs/locker" "github.com/databricks/cli/libs/locker"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
) )

View File

@ -1,4 +1,4 @@
module github.com/databricks/bricks/bundle/internal/tf/codegen module github.com/databricks/cli/bundle/internal/tf/codegen
go 1.18 go 1.18

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"log" "log"
"github.com/databricks/bricks/bundle/internal/tf/codegen/generator" "github.com/databricks/cli/bundle/internal/tf/codegen/generator"
"github.com/databricks/bricks/bundle/internal/tf/codegen/schema" "github.com/databricks/cli/bundle/internal/tf/codegen/schema"
) )
func main() { func main() {

View File

@ -3,7 +3,7 @@ package bundle
import ( import (
"context" "context"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
) )
// Mutator is the interface type that mutates a bundle's configuration or internal state. // Mutator is the interface type that mutates a bundle's configuration or internal state.

View File

@ -1,9 +1,9 @@
package phases package phases
import ( import (
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/artifacts" "github.com/databricks/cli/bundle/artifacts"
"github.com/databricks/bricks/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/interpolation"
) )
// The build phase builds artifacts. // The build phase builds artifacts.

View File

@ -1,11 +1,11 @@
package phases package phases
import ( import (
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/artifacts" "github.com/databricks/cli/bundle/artifacts"
"github.com/databricks/bricks/bundle/deploy/files" "github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/bricks/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/lock"
"github.com/databricks/bricks/bundle/deploy/terraform" "github.com/databricks/cli/bundle/deploy/terraform"
) )
// The deploy phase deploys artifacts and resources. // The deploy phase deploys artifacts and resources.

View File

@ -1,10 +1,10 @@
package phases package phases
import ( import (
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/deploy/files" "github.com/databricks/cli/bundle/deploy/files"
"github.com/databricks/bricks/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/lock"
"github.com/databricks/bricks/bundle/deploy/terraform" "github.com/databricks/cli/bundle/deploy/terraform"
) )
// The destroy phase deletes artifacts and resources. // The destroy phase deletes artifacts and resources.

View File

@ -1,11 +1,11 @@
package phases package phases
import ( import (
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/interpolation"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/bricks/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/bricks/bundle/deploy/terraform" "github.com/databricks/cli/bundle/deploy/terraform"
) )
// The initialize phase fills in defaults and connects to the workspace. // The initialize phase fills in defaults and connects to the workspace.

View File

@ -4,8 +4,8 @@ package phases
import ( import (
"context" "context"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
) )
// This phase type groups mutators that belong to a lifecycle phase. // This phase type groups mutators that belong to a lifecycle phase.

View File

@ -4,8 +4,8 @@ import (
"fmt" "fmt"
"os" "os"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/folders" "github.com/databricks/cli/folders"
) )
const envBundleRoot = "BUNDLE_ROOT" const envBundleRoot = "BUNDLE_ROOT"

View File

@ -5,7 +5,7 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -6,12 +6,12 @@ import (
"strconv" "strconv"
"time" "time"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/bricks/bundle/run/output" "github.com/databricks/cli/bundle/run/output"
"github.com/databricks/bricks/bundle/run/progress" "github.com/databricks/cli/bundle/run/progress"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/retries" "github.com/databricks/databricks-sdk-go/retries"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/fatih/color" "github.com/fatih/color"

View File

@ -3,7 +3,7 @@ package run
import ( import (
"fmt" "fmt"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
) )
// RunnerLookup maps identifiers to a list of workloads that match that identifier. // RunnerLookup maps identifiers to a list of workloads that match that identifier.

View File

@ -3,9 +3,9 @@ package run
import ( import (
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )

View File

@ -6,12 +6,12 @@ import (
"strings" "strings"
"time" "time"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/bricks/bundle/run/output" "github.com/databricks/cli/bundle/run/output"
"github.com/databricks/bricks/bundle/run/progress" "github.com/databricks/cli/bundle/run/progress"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/bricks/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/databricks/databricks-sdk-go/service/pipelines"
flag "github.com/spf13/pflag" flag "github.com/spf13/pflag"
) )

View File

@@ -43,7 +43,7 @@ func (event *ProgressEvent) IsInplaceSupported() bool {
 	return false
 }
-// TODO: Add inplace logging to pipelines. https://github.com/databricks/bricks/issues/280
+// TODO: Add inplace logging to pipelines. https://github.com/databricks/cli/issues/280
 type UpdateTracker struct {
 	UpdateId   string
 	PipelineId string

View File

@ -5,8 +5,8 @@ import (
"fmt" "fmt"
"strings" "strings"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/run/output" "github.com/databricks/cli/bundle/run/output"
) )
type key string type key string

View File

@ -3,9 +3,9 @@ package run
import ( import (
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/bricks/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )

View File

@@ -14,14 +14,14 @@ These descriptions are rendered in the inline documentation in an IDE
 ### SOP: Add schema descriptions for new fields in bundle config
 1. You can autogenerate empty descriptions for the new fields by running
-`bricks bundle schema --only-docs > ~/bricks/bundle/schema/docs/bundle_descriptions.json`
+`databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json`
 2. Manually edit bundle_descriptions.json to add your descriptions
 3. Build again to embed the new `bundle_descriptions.json` into the binary (`go build`)
-4. Again run `bricks bundle schema --only-docs > ~/bricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `environments`
+4. Again run `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `environments`
 5. push to repo
 ### SOP: Update descriptions in resources from a newer openapi spec
-1. Run `bricks bundle schema --only-docs --openapi PATH_TO_SPEC > ~/bricks/bundle/schema/docs/bundle_descriptions.json`
+1. Run `databricks bundle schema --only-docs --openapi PATH_TO_SPEC > ~/databricks/bundle/schema/docs/bundle_descriptions.json`
 2. push to repo

View File

@ -7,7 +7,7 @@ import (
"os" "os"
"reflect" "reflect"
"github.com/databricks/bricks/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/databricks-sdk-go/openapi" "github.com/databricks/databricks-sdk-go/openapi"
) )

View File

@@ -160,7 +160,7 @@ func (reader *OpenapiReader) jobsDocs() (*Docs, error) {
 	}
 	jobDocs := schemaToDocs(jobSettingsSchema)
 	// TODO: add description for id if needed.
-	// Tracked in https://github.com/databricks/bricks/issues/242
+	// Tracked in https://github.com/databricks/cli/issues/242
 	jobsDocs := &Docs{
 		Description: "List of job definations",
 		AdditionalProperties: jobDocs,
@@ -175,7 +175,7 @@ func (reader *OpenapiReader) pipelinesDocs() (*Docs, error) {
 	}
 	pipelineDocs := schemaToDocs(pipelineSpecSchema)
 	// TODO: Two fields in resources.Pipeline have the json tag id. Clarify the
-	// semantics and then add a description if needed. (https://github.com/databricks/bricks/issues/242)
+	// semantics and then add a description if needed. (https://github.com/databricks/cli/issues/242)
 	pipelinesDocs := &Docs{
 		Description: "List of pipeline definations",
 		AdditionalProperties: pipelineDocs,

View File

@ -6,8 +6,8 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/interpolation"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -4,8 +4,8 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -4,10 +4,10 @@ import (
"context" "context"
"testing" "testing"
"github.com/databricks/bricks/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/bricks/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/interpolation"
"github.com/databricks/bricks/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/bricks/bundle/config/variable" "github.com/databricks/cli/bundle/config/variable"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )

View File

@ -3,7 +3,7 @@
package billable_usage package billable_usage
import ( import (
"github.com/databricks/bricks/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/databricks-sdk-go/service/billing" "github.com/databricks/databricks-sdk-go/service/billing"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )

View File

@ -5,9 +5,9 @@ package budgets
import ( import (
"fmt" "fmt"
"github.com/databricks/bricks/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/bricks/libs/flags" "github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/billing" "github.com/databricks/databricks-sdk-go/service/billing"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )

cmd/account/cmd.go (generated, 44 changes)
View File

@ -3,30 +3,30 @@
package account package account
import ( import (
"github.com/databricks/bricks/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/spf13/cobra" "github.com/spf13/cobra"
billable_usage "github.com/databricks/bricks/cmd/account/billable-usage" billable_usage "github.com/databricks/cli/cmd/account/billable-usage"
budgets "github.com/databricks/bricks/cmd/account/budgets" budgets "github.com/databricks/cli/cmd/account/budgets"
credentials "github.com/databricks/bricks/cmd/account/credentials" credentials "github.com/databricks/cli/cmd/account/credentials"
custom_app_integration "github.com/databricks/bricks/cmd/account/custom-app-integration" custom_app_integration "github.com/databricks/cli/cmd/account/custom-app-integration"
encryption_keys "github.com/databricks/bricks/cmd/account/encryption-keys" encryption_keys "github.com/databricks/cli/cmd/account/encryption-keys"
account_groups "github.com/databricks/bricks/cmd/account/groups" account_groups "github.com/databricks/cli/cmd/account/groups"
account_ip_access_lists "github.com/databricks/bricks/cmd/account/ip-access-lists" account_ip_access_lists "github.com/databricks/cli/cmd/account/ip-access-lists"
log_delivery "github.com/databricks/bricks/cmd/account/log-delivery" log_delivery "github.com/databricks/cli/cmd/account/log-delivery"
account_metastore_assignments "github.com/databricks/bricks/cmd/account/metastore-assignments" account_metastore_assignments "github.com/databricks/cli/cmd/account/metastore-assignments"
account_metastores "github.com/databricks/bricks/cmd/account/metastores" account_metastores "github.com/databricks/cli/cmd/account/metastores"
networks "github.com/databricks/bricks/cmd/account/networks" networks "github.com/databricks/cli/cmd/account/networks"
o_auth_enrollment "github.com/databricks/bricks/cmd/account/o-auth-enrollment" o_auth_enrollment "github.com/databricks/cli/cmd/account/o-auth-enrollment"
private_access "github.com/databricks/bricks/cmd/account/private-access" private_access "github.com/databricks/cli/cmd/account/private-access"
published_app_integration "github.com/databricks/bricks/cmd/account/published-app-integration" published_app_integration "github.com/databricks/cli/cmd/account/published-app-integration"
account_service_principals "github.com/databricks/bricks/cmd/account/service-principals" account_service_principals "github.com/databricks/cli/cmd/account/service-principals"
storage "github.com/databricks/bricks/cmd/account/storage" storage "github.com/databricks/cli/cmd/account/storage"
account_storage_credentials "github.com/databricks/bricks/cmd/account/storage-credentials" account_storage_credentials "github.com/databricks/cli/cmd/account/storage-credentials"
account_users "github.com/databricks/bricks/cmd/account/users" account_users "github.com/databricks/cli/cmd/account/users"
vpc_endpoints "github.com/databricks/bricks/cmd/account/vpc-endpoints" vpc_endpoints "github.com/databricks/cli/cmd/account/vpc-endpoints"
workspace_assignment "github.com/databricks/bricks/cmd/account/workspace-assignment" workspace_assignment "github.com/databricks/cli/cmd/account/workspace-assignment"
workspaces "github.com/databricks/bricks/cmd/account/workspaces" workspaces "github.com/databricks/cli/cmd/account/workspaces"
) )
var accountCmd = &cobra.Command{ var accountCmd = &cobra.Command{

View File

@ -5,9 +5,9 @@ package credentials
import ( import (
"fmt" "fmt"
"github.com/databricks/bricks/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/bricks/libs/flags" "github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/provisioning" "github.com/databricks/databricks-sdk-go/service/provisioning"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )

View File

@ -1,6 +1,6 @@
package credentials package credentials
import "github.com/databricks/bricks/libs/cmdio" import "github.com/databricks/cli/libs/cmdio"
func init() { func init() {
listCmd.Annotations["template"] = cmdio.Heredoc(` listCmd.Annotations["template"] = cmdio.Heredoc(`

View File

@ -5,9 +5,9 @@ package custom_app_integration
import ( import (
"fmt" "fmt"
"github.com/databricks/bricks/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/bricks/libs/flags" "github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/oauth2" "github.com/databricks/databricks-sdk-go/service/oauth2"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )

View File

@ -5,9 +5,9 @@ package encryption_keys
import ( import (
"fmt" "fmt"
"github.com/databricks/bricks/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/bricks/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
"github.com/databricks/bricks/libs/flags" "github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/provisioning" "github.com/databricks/databricks-sdk-go/service/provisioning"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )

View File

@ -1,6 +1,6 @@
package encryption_keys package encryption_keys
import "github.com/databricks/bricks/libs/cmdio" import "github.com/databricks/cli/libs/cmdio"
func init() { func init() {
listCmd.Annotations["template"] = cmdio.Heredoc(` listCmd.Annotations["template"] = cmdio.Heredoc(`

Some files were not shown because too many files have changed in this diff.