Merge remote-tracking branch 'origin' into validate-response-body

This commit is contained in:
Shreyas Goenka 2025-01-29 17:17:54 +01:00
commit 15c09c61d5
No known key found for this signature in database
GPG Key ID: 92A07DF49CCB0622
62 changed files with 11609 additions and 434 deletions

View File

@ -1,5 +1,25 @@
# Version changelog
## [Release] Release v0.240.0
Bundles:
* Added support for double underscore variable references ([#2203](https://github.com/databricks/cli/pull/2203)).
* Do not wait for app compute to start on `bundle deploy` ([#2144](https://github.com/databricks/cli/pull/2144)).
* Remove bundle.git.inferred ([#2258](https://github.com/databricks/cli/pull/2258)).
* libs/python: Remove DetectInterpreters ([#2234](https://github.com/databricks/cli/pull/2234)).
API Changes:
* Added `databricks access-control` command group.
* Added `databricks serving-endpoints http-request` command.
* Changed `databricks serving-endpoints create` command with new required argument order.
* Changed `databricks serving-endpoints get-open-api` command return type to become non-empty.
* Changed `databricks recipients update` command return type to become non-empty.
OpenAPI commit 0be1b914249781b5e903b7676fd02255755bc851 (2025-01-22)
Dependency updates:
* Bump github.com/databricks/databricks-sdk-go from 0.55.0 to 0.56.1 ([#2238](https://github.com/databricks/cli/pull/2238)).
* Upgrade TF provider to 1.64.1 ([#2247](https://github.com/databricks/cli/pull/2247)).
## [Release] Release v0.239.1
CLI:

View File

@ -48,6 +48,9 @@ vendor:
schema:
go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
docs:
go run ./bundle/docsgen ./bundle/internal/schema ./bundle/docsgen
INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
integration:
@ -56,4 +59,4 @@ integration:
integration-short:
$(INTEGRATION) -short
.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover
.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs

View File

@ -21,8 +21,7 @@ Error: unable to load repository specific gitconfig: open config: permission den
Exit code: 1
{
"bundle_root_path": ".",
"inferred": true
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
@ -31,8 +30,7 @@ Error: unable to load repository specific gitconfig: open config: permission den
Exit code: 1
{
"bundle_root_path": ".",
"inferred": true
"bundle_root_path": "."
}
@ -42,14 +40,12 @@ Exit code: 1
>>> $CLI bundle validate -o json
{
"bundle_root_path": ".",
"inferred": true
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
{
"bundle_root_path": ".",
"inferred": true
"bundle_root_path": "."
}
@ -63,8 +59,7 @@ Error: unable to load repository specific gitconfig: open config: permission den
Exit code: 1
{
"bundle_root_path": ".",
"inferred": true
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
@ -73,6 +68,5 @@ Error: unable to load repository specific gitconfig: open config: permission den
Exit code: 1
{
"bundle_root_path": ".",
"inferred": true
"bundle_root_path": "."
}

View File

@ -0,0 +1,14 @@
bundle:
name: double_underscore
variables:
double__underscore:
description: "This is a variable with a double underscore"
default: "default"
resources:
jobs:
test_job:
name: "test"
tasks:
- task_key: "test ${var.double__underscore}"

View File

@ -0,0 +1,7 @@
>>> $CLI bundle validate -o json
[
{
"task_key": "test default"
}
]

View File

@ -0,0 +1 @@
trace $CLI bundle validate -o json | jq .resources.jobs.test_job.tasks

View File

@ -39,6 +39,37 @@
}
}
=== file cannot be parsed
>>> errcode $CLI bundle validate -o json --target invalid_json
Error: failed to parse variables file $TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json: error decoding JSON at :0:0: invalid character 'o' in literal false (expecting 'a')
Exit code: 1
{
"job_cluster_key": "${var.cluster_key}",
"new_cluster": {
"node_type_id": "${var.cluster.node_type_id}",
"num_workers": "${var.cluster_workers}"
}
}
=== file has wrong structure
>>> errcode $CLI bundle validate -o json --target wrong_file_structure
Error: failed to parse variables file $TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json: invalid format
Variables file must be a JSON object with the following format:
{"var1": "value1", "var2": "value2"}
Exit code: 1
{
"job_cluster_key": "${var.cluster_key}",
"new_cluster": {
"node_type_id": "${var.cluster.node_type_id}",
"num_workers": "${var.cluster_workers}"
}
}
=== file has variable that is complex but default is string
>>> errcode $CLI bundle validate -o json --target complex_to_string
Error: variable cluster_key is not of type complex, but the value in the variable file is a complex type

View File

@ -14,11 +14,11 @@ trace BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate
title "variable has value in config file"
trace $CLI bundle validate -o json --target with_value | jq $cluster_expr
# title "file cannot be parsed"
# trace errcode $CLI bundle validate -o json --target invalid_json | jq $cluster_expr
title "file cannot be parsed"
trace errcode $CLI bundle validate -o json --target invalid_json | jq $cluster_expr
# title "file has wrong structure"
# trace errcode $CLI bundle validate -o json --target wrong_file_structure | jq $cluster_expr
title "file has wrong structure"
trace errcode $CLI bundle validate -o json --target wrong_file_structure | jq $cluster_expr
title "file has variable that is complex but default is string"
trace errcode $CLI bundle validate -o json --target complex_to_string | jq $cluster_expr

View File

@ -0,0 +1,8 @@
# Fix for windows
[[Repls]]
Old = '\$TMPDIR\\.databricks\\bundle\\wrong_file_structure\\variable-overrides.json'
New = '$$TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json'
[[Repls]]
Old = '\$TMPDIR\\.databricks\\bundle\\invalid_json\\variable-overrides.json'
New = '$$TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json'

View File

@ -3,8 +3,7 @@
"bundle": {
"environment": "dev",
"git": {
"bundle_root_path": ".",
"inferred": true
"bundle_root_path": "."
},
"target": "dev",
"terraform": {

View File

@ -13,7 +13,8 @@ import (
)
func StartCmdServer(t *testing.T) *testserver.Server {
server := StartServer(t)
server := testserver.New(t)
server.Handle("/", func(r *http.Request) (any, error) {
q := r.URL.Query()
args := strings.Split(q.Get("args"), " ")

View File

@ -2,7 +2,6 @@ package acceptance_test
import (
"net/http"
"testing"
"github.com/databricks/cli/libs/testserver"
"github.com/databricks/databricks-sdk-go/service/catalog"
@ -11,14 +10,6 @@ import (
"github.com/databricks/databricks-sdk-go/service/workspace"
)
func StartServer(t *testing.T) *testserver.Server {
server := testserver.New(t)
t.Cleanup(func() {
server.Close()
})
return server
}
func AddHandlers(server *testserver.Server) {
server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
return compute.ListPoliciesResponse{

View File

@ -8,9 +8,6 @@ type Git struct {
// Path to bundle root relative to the git repository root.
BundleRootPath string `json:"bundle_root_path,omitempty" bundle:"readonly"`
// Inferred is set to true if the Git details were inferred and weren't set explicitly
Inferred bool `json:"inferred,omitempty" bundle:"readonly"`
// The actual branch according to Git (may be different from the configured branch)
ActualBranch string `json:"actual_branch,omitempty" bundle:"readonly"`
}

View File

@ -40,7 +40,6 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagn
b.Config.Bundle.Git.ActualBranch = info.CurrentBranch
if b.Config.Bundle.Git.Branch == "" {
// Only load branch if there's no user defined value
b.Config.Bundle.Git.Inferred = true
b.Config.Bundle.Git.Branch = info.CurrentBranch
}

View File

@ -135,11 +135,6 @@ func findNonUserPath(b *bundle.Bundle) string {
}
func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) diag.Diagnostics {
if b.Config.Bundle.Git.Inferred {
env := b.Config.Bundle.Target
log.Warnf(ctx, "target with 'mode: production' should specify an explicit 'targets.%s.git' configuration", env)
}
r := b.Config.Resources
for i := range r.Pipelines {
if r.Pipelines[i].Development {

View File

@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"io"
pathlib "path"
"path/filepath"
"github.com/databricks/cli/libs/dyn"
@ -99,7 +100,7 @@ func removeVirtualLocations(locations []dyn.Location) []dyn.Location {
// parsePythonLocations parses locations.json from the Python mutator.
//
// locations file is newline-separated JSON objects with pythonLocationEntry structure.
func parsePythonLocations(input io.Reader) (*pythonLocations, error) {
func parsePythonLocations(bundleRoot string, input io.Reader) (*pythonLocations, error) {
decoder := json.NewDecoder(input)
locations := newPythonLocations()
@ -116,6 +117,12 @@ func parsePythonLocations(input io.Reader) (*pythonLocations, error) {
return nil, fmt.Errorf("failed to parse python location: %s", err)
}
// Output can contain both relative paths and absolute paths outside of the bundle root.
// Mutator pipeline expects all paths to be absolute at this point, so make all paths absolute.
if !pathlib.IsAbs(entry.File) {
entry.File = filepath.Join(bundleRoot, entry.File)
}
location := dyn.Location{
File: entry.File,
Line: entry.Line,

View File

@ -165,12 +165,28 @@ func TestLoadOutput(t *testing.T) {
require.Equal(t, filepath.Join(bundleRoot, generatedFileName), notebookPath.Locations()[0].File)
}
func TestParsePythonLocations(t *testing.T) {
expected := dyn.Location{File: "foo.py", Line: 1, Column: 2}
func TestParsePythonLocations_absolutePath(t *testing.T) {
// output can contain an absolute path that is outside of the bundle root
expected := dyn.Location{File: "/Shared/foo.py", Line: 1, Column: 2}
input := `{"path": "foo", "file": "foo.py", "line": 1, "column": 2}`
input := `{"path": "foo", "file": "/Shared/foo.py", "line": 1, "column": 2}`
reader := bytes.NewReader([]byte(input))
locations, err := parsePythonLocations(reader)
locations, err := parsePythonLocations("/tmp/", reader)
assert.NoError(t, err)
assert.True(t, locations.keys["foo"].exists)
assert.Equal(t, expected, locations.keys["foo"].location)
}
func TestParsePythonLocations_relativePath(t *testing.T) {
// output can contain relative paths; we expect all locations to be absolute
// at this stage of the mutator pipeline
expected := dyn.Location{File: filepath.Clean("/tmp/my_project/foo.py"), Line: 1, Column: 2}
input := `{"path": "foo", "file": "foo.py", "line": 1, "column": 2}`
reader := bytes.NewReader([]byte(input))
locations, err := parsePythonLocations(filepath.Clean("/tmp/my_project"), reader)
assert.NoError(t, err)

View File

@ -331,7 +331,7 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, root dyn.Value, op
return dyn.InvalidValue, diag.Errorf("failed to load diagnostics: %s", pythonDiagnosticsErr)
}
locations, err := loadLocationsFile(locationsPath)
locations, err := loadLocationsFile(opts.bundleRootPath, locationsPath)
if err != nil {
return dyn.InvalidValue, diag.Errorf("failed to load locations: %s", err)
}
@ -381,7 +381,7 @@ func writeInputFile(inputPath string, input dyn.Value) error {
}
// loadLocationsFile loads locations.json containing source locations for generated YAML.
func loadLocationsFile(locationsPath string) (*pythonLocations, error) {
func loadLocationsFile(bundleRoot, locationsPath string) (*pythonLocations, error) {
locationsFile, err := os.Open(locationsPath)
if errors.Is(err, fs.ErrNotExist) {
return newPythonLocations(), nil
@ -391,7 +391,7 @@ func loadLocationsFile(locationsPath string) (*pythonLocations, error) {
defer locationsFile.Close()
return parsePythonLocations(locationsFile)
return parsePythonLocations(bundleRoot, locationsFile)
}
func loadOutputFile(rootPath, outputPath string, locations *pythonLocations) (dyn.Value, diag.Diagnostics) {

View File

@ -54,6 +54,8 @@ func TestPythonMutator_Name_applyMutators(t *testing.T) {
func TestPythonMutator_loadResources(t *testing.T) {
withFakeVEnv(t, ".venv")
rootPath := filepath.Join(t.TempDir(), "my_project")
b := loadYaml("databricks.yml", `
experimental:
python:
@ -64,6 +66,9 @@ func TestPythonMutator_loadResources(t *testing.T) {
job0:
name: job_0`)
// set rootPath so that we can make absolute paths in dyn.Location
b.BundleRootPath = rootPath
ctx := withProcessStub(
t,
[]string{
@ -120,7 +125,7 @@ func TestPythonMutator_loadResources(t *testing.T) {
assert.Equal(t, []dyn.Location{
{
File: "src/examples/job1.py",
File: filepath.Join(rootPath, "src/examples/job1.py"),
Line: 5,
Column: 7,
},

View File

@ -31,7 +31,6 @@ func TestComputeMetadataMutator(t *testing.T) {
OriginURL: "www.host.com",
Commit: "abcd",
BundleRootPath: "a/b/c/d",
Inferred: true,
},
},
Resources: config.Resources{
@ -72,9 +71,6 @@ func TestComputeMetadataMutator(t *testing.T) {
OriginURL: "www.host.com",
Commit: "abcd",
BundleRootPath: "a/b/c/d",
// Test that this field doesn't carry over into the metadata.
Inferred: false,
},
},
Resources: metadata.Resources{

79
bundle/docsgen/README.md Normal file
View File

@ -0,0 +1,79 @@
## docs-autogen
1. Install [Golang](https://go.dev/doc/install)
2. Run `make vendor docs` from the repo
3. See generated documents in `./bundle/docsgen/output` directory
4. To change descriptions, update the content in `./bundle/internal/schema/annotations.yml` or `./bundle/internal/schema/annotations_openapi_overrides.yml` and re-run `make docs`
For simpler usage, run it together with a copy command that moves the resulting files into a local `docs` repo. Note that this will overwrite any local changes in the affected files. Example:
```
make docs && cp bundle/docsgen/output/*.md ../docs/source/dev-tools/bundles
```
To change the intro sections of the generated files, update them in the `templates/` directory
### Annotation file structure
```yaml
"<root-type-name>":
"<property-name>":
description: Description of the property, only plain text is supported
markdown_description: Description with markdown support, if defined it will override the value in docs and in JSON-schema
markdown_examples: Custom block for any example, in free form, Markdown is supported
title: JSON-schema title, not used in docs
default: Default value of the property, not used in docs
enum: Possible values of enum-type, not used in docs
```
Descriptions with the `PLACEHOLDER` value are not displayed in the docs or the JSON schema
All relative links like `[_](/dev-tools/bundles/settings.md#cluster_id)` are kept as-is in the docs but converted to absolute links in the JSON schema
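For example, an anchor-only link is rewritten against the reference page (a sketch; `<docs-base-url>` stands in for the configured base URL, which is an assumption here):
```
[_](#git)  ->  [git](<docs-base-url>/dev-tools/bundles/reference.html#git)
```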
To change the description of a type itself (not its fields), use `"_"`:
```yaml
github.com/databricks/cli/bundle/config/resources.Cluster:
"_":
"markdown_description": |-
The cluster resource defines an [all-purpose cluster](/api/workspace/clusters/create).
```
### Example annotation
```yaml
github.com/databricks/cli/bundle/config.Bundle:
"cluster_id":
"description": |-
The ID of a cluster to use to run the bundle.
"markdown_description": |-
The ID of a cluster to use to run the bundle. See [_](/dev-tools/bundles/settings.md#cluster_id).
"compute_id":
"description": |-
PLACEHOLDER
"databricks_cli_version":
"description": |-
The Databricks CLI version to use for the bundle.
"markdown_description": |-
The Databricks CLI version to use for the bundle. See [_](/dev-tools/bundles/settings.md#databricks_cli_version).
"deployment":
"description": |-
The definition of the bundle deployment
"markdown_description": |-
The definition of the bundle deployment. For supported attributes, see [_](#deployment) and [_](/dev-tools/bundles/deployment-modes.md).
"git":
"description": |-
The Git version control details that are associated with your bundle.
"markdown_description": |-
The Git version control details that are associated with your bundle. For supported attributes, see [_](#git) and [_](/dev-tools/bundles/settings.md#git).
"name":
"description": |-
The name of the bundle.
"uuid":
"description": |-
PLACEHOLDER
```
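`markdown_examples` can carry a free-form example block. A minimal sketch, adapted from the `artifacts` entry added to `annotations.yml` in this commit:
````yaml
github.com/databricks/cli/bundle/config.Root:
  "artifacts":
    "markdown_examples": |-
      ```yaml
      artifacts:
        default:
          type: whl
          build: poetry build
          path: .
      ```
````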
### TODO
Add a file watcher to track changes in the annotation files and re-run the `make docs` script automatically

135
bundle/docsgen/main.go Normal file
View File

@ -0,0 +1,135 @@
package main
import (
"fmt"
"log"
"os"
"path"
"reflect"
"strings"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/internal/annotation"
"github.com/databricks/cli/libs/jsonschema"
)
const (
rootFileName = "reference.md"
resourcesFileName = "resources.md"
)
func main() {
if len(os.Args) != 3 {
fmt.Println("Usage: go run main.go <annotation-file> <output-file>")
os.Exit(1)
}
annotationDir := os.Args[1]
docsDir := os.Args[2]
outputDir := path.Join(docsDir, "output")
templatesDir := path.Join(docsDir, "templates")
if _, err := os.Stat(outputDir); os.IsNotExist(err) {
if err := os.MkdirAll(outputDir, 0o755); err != nil {
log.Fatal(err)
}
}
rootHeader, err := os.ReadFile(path.Join(templatesDir, rootFileName))
if err != nil {
log.Fatal(err)
}
err = generateDocs(
[]string{path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, rootFileName),
reflect.TypeOf(config.Root{}),
string(rootHeader),
)
if err != nil {
log.Fatal(err)
}
resourcesHeader, err := os.ReadFile(path.Join(templatesDir, resourcesFileName))
if err != nil {
log.Fatal(err)
}
err = generateDocs(
[]string{path.Join(annotationDir, "annotations_openapi.yml"), path.Join(annotationDir, "annotations_openapi_overrides.yml"), path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, resourcesFileName),
reflect.TypeOf(config.Resources{}),
string(resourcesHeader),
)
if err != nil {
log.Fatal(err)
}
}
func generateDocs(inputPaths []string, outputPath string, rootType reflect.Type, header string) error {
annotations, err := annotation.LoadAndMerge(inputPaths)
if err != nil {
log.Fatal(err)
}
// schemas is used to resolve references to schemas
schemas := map[string]*jsonschema.Schema{}
// ownFields is used to track fields that are defined in the annotation file and should be included in the docs page
ownFields := map[string]bool{}
s, err := jsonschema.FromType(rootType, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
_, isOwnField := annotations[jsonschema.TypePath(typ)]
if isOwnField {
ownFields[jsonschema.TypePath(typ)] = true
}
refPath := getPath(typ)
shouldHandle := strings.HasPrefix(refPath, "github.com")
if !shouldHandle {
schemas[jsonschema.TypePath(typ)] = &s
return s
}
a := annotations[refPath]
if a == nil {
a = map[string]annotation.Descriptor{}
}
rootTypeAnnotation, ok := a["_"]
if ok {
assignAnnotation(&s, rootTypeAnnotation)
}
for k, v := range s.Properties {
assignAnnotation(v, a[k])
}
schemas[jsonschema.TypePath(typ)] = &s
return s
},
})
if err != nil {
log.Fatal(err)
}
nodes := buildNodes(s, schemas, ownFields)
err = buildMarkdown(nodes, outputPath, header)
if err != nil {
log.Fatal(err)
}
return nil
}
func getPath(typ reflect.Type) string {
return typ.PkgPath() + "." + typ.Name()
}
func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
if a.Description != "" && a.Description != annotation.Placeholder {
s.Description = a.Description
}
if a.MarkdownDescription != "" {
s.MarkdownDescription = a.MarkdownDescription
}
if a.MarkdownExamples != "" {
s.Examples = []any{a.MarkdownExamples}
}
}

View File

@ -0,0 +1,99 @@
package main
import (
"fmt"
"log"
"os"
"strings"
)
func buildMarkdown(nodes []rootNode, outputFile, header string) error {
m := newMardownRenderer()
m = m.PlainText(header)
for _, node := range nodes {
m = m.LF()
if node.TopLevel {
m = m.H2(node.Title)
} else {
m = m.H3(node.Title)
}
m = m.LF()
if node.Type != "" {
m = m.PlainText(fmt.Sprintf("**`Type: %s`**", node.Type))
m = m.LF()
}
m = m.PlainText(node.Description)
m = m.LF()
if len(node.ObjectKeyAttributes) > 0 {
n := pickLastWord(node.Title)
n = removePluralForm(n)
m = m.CodeBlocks("yaml", fmt.Sprintf("%ss:\n <%s-name>:\n <%s-field-name>: <%s-field-value>", n, n, n, n))
m = m.LF()
m = buildAttributeTable(m, node.ObjectKeyAttributes)
} else if len(node.ArrayItemAttributes) > 0 {
m = m.LF()
m = buildAttributeTable(m, node.ArrayItemAttributes)
} else if len(node.Attributes) > 0 {
m = m.LF()
m = buildAttributeTable(m, node.Attributes)
}
if node.Example != "" {
m = m.LF()
m = m.PlainText("**Example**")
m = m.LF()
m = m.PlainText(node.Example)
}
}
f, err := os.Create(outputFile)
if err != nil {
log.Fatal(err)
}
_, err = f.WriteString(m.String())
if err != nil {
log.Fatal(err)
}
return f.Close()
}
func pickLastWord(s string) string {
words := strings.Split(s, ".")
return words[len(words)-1]
}
// Build a custom table in the format we use on the Databricks website
func buildAttributeTable(m *markdownRenderer, attributes []attributeNode) *markdownRenderer {
m = m.LF()
m = m.PlainText(".. list-table::")
m = m.PlainText(" :header-rows: 1")
m = m.LF()
m = m.PlainText(" * - Key")
m = m.PlainText(" - Type")
m = m.PlainText(" - Description")
m = m.LF()
for _, a := range attributes {
m = m.PlainText(" * - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a))
m = m.LF()
}
return m
}
func formatDescription(a attributeNode) string {
s := strings.ReplaceAll(a.Description, "\n", " ")
if a.Link != "" {
if strings.HasSuffix(s, ".") {
s += " "
} else if s != "" {
s += ". "
}
s += fmt.Sprintf("See [_](#%s).", a.Link)
}
return s
}

228
bundle/docsgen/nodes.go Normal file
View File

@ -0,0 +1,228 @@
package main
import (
"sort"
"strings"
"github.com/databricks/cli/libs/jsonschema"
)
// rootNode is an intermediate representation of a resolved JSON-schema item that is used to generate documentation
// Every schema node follows this conversion: `JSON-schema -> rootNode -> markdown text`
type rootNode struct {
Title string
Description string
Attributes []attributeNode
Example string
ObjectKeyAttributes []attributeNode
ArrayItemAttributes []attributeNode
TopLevel bool
Type string
}
type attributeNode struct {
Title string
Type string
Description string
Link string
}
type rootProp struct {
// k is the name of the property
k string
// v is the corresponding json-schema node
v *jsonschema.Schema
// topLevel is true only for direct properties of the schema of root type (e.g. config.Root or config.Resources)
// Example: config.Root has .
topLevel bool
// circular indicates if property was added by recursive type, e.g. task.for_each_task.task.for_each_task
// These entries don't expand further and don't add any new nodes from their properties
circular bool
}
const MapType = "Map"
// buildNodes converts JSON-schema to a flat list of rootNode items that are then used to generate markdown documentation
// It recursively traverses the schema, expanding the resulting list with new items for every property of nodes of `object` and `array` type
func buildNodes(s jsonschema.Schema, refs map[string]*jsonschema.Schema, ownFields map[string]bool) []rootNode {
rootProps := []rootProp{}
for k, v := range s.Properties {
rootProps = append(rootProps, rootProp{k, v, true, false})
}
nodes := make([]rootNode, 0, len(rootProps))
visited := make(map[string]bool)
for i := 0; i < len(rootProps); i++ {
item := rootProps[i]
k := item.k
v := item.v
if visited[k] {
continue
}
visited[k] = true
v = resolveRefs(v, refs)
node := rootNode{
Title: k,
Description: getDescription(v, item.topLevel),
TopLevel: item.topLevel,
Example: getExample(v),
Type: getHumanReadableType(v.Type),
}
hasProperties := len(v.Properties) > 0
if hasProperties {
node.Attributes = getAttributes(v.Properties, refs, ownFields, k, item.circular)
}
mapValueType := getMapValueType(v, refs)
if mapValueType != nil {
d := getDescription(mapValueType, true)
if d != "" {
node.Description = d
}
if node.Example == "" {
node.Example = getExample(mapValueType)
}
node.ObjectKeyAttributes = getAttributes(mapValueType.Properties, refs, ownFields, getMapKeyPrefix(k), item.circular)
}
arrayItemType := resolveRefs(v.Items, refs)
if arrayItemType != nil {
node.ArrayItemAttributes = getAttributes(arrayItemType.Properties, refs, ownFields, k, item.circular)
}
nodes = append(nodes, node)
// Whether we should add new root props from the children of the current JSON-schema node to include their definitions in this document
shouldAddNewProps := !item.circular
if shouldAddNewProps {
newProps := []rootProp{}
// Adds node with definition for the properties. Example:
// bundle:
// prop-name: <value>
if hasProperties {
newProps = append(newProps, extractNodes(k, v.Properties, refs, ownFields)...)
}
// Adds node with definition for the type of array item. Example:
// permissions:
// - <item>
if arrayItemType != nil {
newProps = append(newProps, extractNodes(k, arrayItemType.Properties, refs, ownFields)...)
}
// Adds node with definition for the type of the Map value. Example:
// targets:
// <key>: <value>
if mapValueType != nil {
newProps = append(newProps, extractNodes(getMapKeyPrefix(k), mapValueType.Properties, refs, ownFields)...)
}
rootProps = append(rootProps, newProps...)
}
}
sort.Slice(nodes, func(i, j int) bool {
return nodes[i].Title < nodes[j].Title
})
return nodes
}
func getMapValueType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema) *jsonschema.Schema {
additionalProps, ok := v.AdditionalProperties.(*jsonschema.Schema)
if ok {
return resolveRefs(additionalProps, refs)
}
return nil
}
func getMapKeyPrefix(s string) string {
return s + ".<name>"
}
func removePluralForm(s string) string {
if strings.HasSuffix(s, "s") {
return strings.TrimSuffix(s, "s")
}
return s
}
func getHumanReadableType(t jsonschema.Type) string {
typesMapping := map[string]string{
"string": "String",
"integer": "Integer",
"boolean": "Boolean",
"array": "Sequence",
"object": "Map",
}
return typesMapping[string(t)]
}
func getAttributes(props, refs map[string]*jsonschema.Schema, ownFields map[string]bool, prefix string, circular bool) []attributeNode {
attributes := []attributeNode{}
for k, v := range props {
v = resolveRefs(v, refs)
typeString := getHumanReadableType(v.Type)
if typeString == "" {
typeString = "Any"
}
var reference string
if isReferenceType(v, refs, ownFields) && !circular {
reference = prefix + "." + k
}
attributes = append(attributes, attributeNode{
Title: k,
Type: typeString,
Description: getDescription(v, true),
Link: reference,
})
}
sort.Slice(attributes, func(i, j int) bool {
return attributes[i].Title < attributes[j].Title
})
return attributes
}
func getDescription(s *jsonschema.Schema, allowMarkdown bool) string {
if allowMarkdown && s.MarkdownDescription != "" {
return s.MarkdownDescription
}
return s.Description
}
func shouldExtract(ref string, ownFields map[string]bool) bool {
if i := strings.Index(ref, "github.com"); i >= 0 {
ref = ref[i:]
}
_, isCustomField := ownFields[ref]
return isCustomField
}
// extractNodes returns a list of rootProp items for all properties of the json-schema node that should be extracted based on context
// E.g. we extract all properties whose referenced types have their own entries in the annotation files (ownFields)
func extractNodes(prefix string, props, refs map[string]*jsonschema.Schema, ownFields map[string]bool) []rootProp {
nodes := []rootProp{}
for k, v := range props {
if v.Reference != nil && !shouldExtract(*v.Reference, ownFields) {
continue
}
v = resolveRefs(v, refs)
if v.Type == "object" || v.Type == "array" {
nodes = append(nodes, rootProp{prefix + "." + k, v, false, isCycleField(k)})
}
}
return nodes
}
func isCycleField(field string) bool {
return field == "for_each_task"
}
func getExample(v *jsonschema.Schema) string {
examples := v.Examples
if len(examples) == 0 {
return ""
}
return examples[0].(string)
}

View File

@ -0,0 +1,120 @@
package main
import (
"testing"
"github.com/databricks/cli/libs/jsonschema"
"github.com/stretchr/testify/assert"
)
func TestBuildNodes_ChildExpansion(t *testing.T) {
tests := []struct {
name string
schema jsonschema.Schema
refs map[string]*jsonschema.Schema
ownFields map[string]bool
wantNodes []rootNode
}{
{
name: "array expansion",
schema: jsonschema.Schema{
Type: "object",
Properties: map[string]*jsonschema.Schema{
"list": {
Type: "array",
Items: &jsonschema.Schema{
Type: "object",
Properties: map[string]*jsonschema.Schema{
"listSub": {Reference: strPtr("#/$defs/github.com/listSub")},
},
},
},
},
},
refs: map[string]*jsonschema.Schema{
"github.com/listSub": {Type: "array", Items: &jsonschema.Schema{Type: "object", Properties: map[string]*jsonschema.Schema{"subField": {Type: "string"}}}},
},
ownFields: map[string]bool{"github.com/listSub": true},
wantNodes: []rootNode{
{
Title: "list",
TopLevel: true,
Type: "Sequence",
ArrayItemAttributes: []attributeNode{
{Title: "listSub", Type: "Sequence", Link: "list.listSub"},
},
},
{
Title: "list.listSub",
Type: "Sequence",
ArrayItemAttributes: []attributeNode{
{Title: "subField", Type: "String"},
},
},
},
},
{
name: "map expansion",
schema: jsonschema.Schema{
Type: "object",
Properties: map[string]*jsonschema.Schema{
"myMap": {
Type: "object",
AdditionalProperties: &jsonschema.Schema{
Reference: strPtr("#/$defs/github.com/myMap"),
Properties: map[string]*jsonschema.Schema{
"mapSub": {Type: "object", Reference: strPtr("#/$defs/github.com/mapSub")},
},
},
},
},
},
refs: map[string]*jsonschema.Schema{
"github.com/myMap": {
Type: "object",
Properties: map[string]*jsonschema.Schema{
"mapSub": {Type: "boolean", Reference: strPtr("#/$defs/github.com/mapSub")},
},
},
"github.com/mapSub": {
Type: "object",
Properties: map[string]*jsonschema.Schema{
"deepSub": {Type: "boolean"},
},
},
},
ownFields: map[string]bool{
"github.com/myMap": true,
"github.com/mapSub": true,
},
wantNodes: []rootNode{
{
Title: "myMap",
TopLevel: true,
Type: "Map",
ObjectKeyAttributes: []attributeNode{
{Title: "mapSub", Type: "Map", Link: "myMap.<name>.mapSub"},
},
},
{
Title: "myMap.<name>.mapSub",
Type: "Map",
Attributes: []attributeNode{
{Title: "deepSub", Type: "Boolean"},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := buildNodes(tt.schema, tt.refs, tt.ownFields)
assert.Equal(t, tt.wantNodes, got)
})
}
}
func strPtr(s string) *string {
return &s
}

File diff suppressed because it is too large

File diff suppressed because it is too large

97
bundle/docsgen/refs.go Normal file
View File

@ -0,0 +1,97 @@
package main
import (
"log"
"strings"
"github.com/databricks/cli/libs/jsonschema"
)
func isReferenceType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema, ownFields map[string]bool) bool {
if v.Type != "object" && v.Type != "array" {
return false
}
if len(v.Properties) > 0 {
return true
}
if v.Items != nil {
items := resolveRefs(v.Items, refs)
if items != nil && items.Type == "object" {
return true
}
}
props := resolveAdditionalProperties(v)
if !isInOwnFields(props, ownFields) {
return false
}
if props != nil {
propsResolved := resolveRefs(props, refs)
return propsResolved.Type == "object"
}
return false
}
func isInOwnFields(node *jsonschema.Schema, ownFields map[string]bool) bool {
if node != nil && node.Reference != nil {
return ownFields[getRefType(node)]
}
return true
}
func resolveAdditionalProperties(v *jsonschema.Schema) *jsonschema.Schema {
if v.AdditionalProperties == nil {
return nil
}
additionalProps, ok := v.AdditionalProperties.(*jsonschema.Schema)
if !ok {
return nil
}
return additionalProps
}
func resolveRefs(s *jsonschema.Schema, schemas map[string]*jsonschema.Schema) *jsonschema.Schema {
if s == nil {
return nil
}
node := s
description := s.Description
markdownDescription := s.MarkdownDescription
examples := s.Examples
for node.Reference != nil {
ref := getRefType(node)
newNode, ok := schemas[ref]
if !ok {
log.Printf("schema %s not found", ref)
break
}
if description == "" {
description = newNode.Description
}
if markdownDescription == "" {
markdownDescription = newNode.MarkdownDescription
}
if len(examples) == 0 {
examples = newNode.Examples
}
node = newNode
}
newNode := *node
newNode.Description = description
newNode.MarkdownDescription = markdownDescription
newNode.Examples = examples
return &newNode
}
func getRefType(node *jsonschema.Schema) string {
if node.Reference == nil {
return ""
}
return strings.TrimPrefix(*node.Reference, "#/$defs/")
}

View File

@ -0,0 +1,51 @@
package main
import (
"fmt"
"runtime"
"strings"
)
type markdownRenderer struct {
nodes []string
}
func newMardownRenderer() *markdownRenderer {
return &markdownRenderer{}
}
func (m *markdownRenderer) add(s string) *markdownRenderer {
m.nodes = append(m.nodes, s)
return m
}
func (m *markdownRenderer) PlainText(s string) *markdownRenderer {
return m.add(s)
}
func (m *markdownRenderer) LF() *markdownRenderer {
return m.add(" ")
}
func (m *markdownRenderer) H2(s string) *markdownRenderer {
return m.add("## " + s)
}
func (m *markdownRenderer) H3(s string) *markdownRenderer {
return m.add("### " + s)
}
func (m *markdownRenderer) CodeBlocks(lang, s string) *markdownRenderer {
return m.add(fmt.Sprintf("```%s%s%s%s```", lang, lineFeed(), s, lineFeed()))
}
func (m *markdownRenderer) String() string {
return strings.Join(m.nodes, lineFeed())
}
func lineFeed() string {
if runtime.GOOS == "windows" {
return "\r\n"
}
return "\n"
}

View File

@ -0,0 +1,10 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Configuration reference for databricks.yml
---
# Configuration reference
This article provides a reference for the keys supported by <DABS> configuration (YAML). See [_](/dev-tools/bundles/index.md).
For complete bundle examples, see [_](/dev-tools/bundles/resource-examples.md) and the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples).

View File

@ -0,0 +1,70 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Learn about resources supported by Databricks Asset Bundles and how to configure them.
---
# <DABS> resources
<DABS> allows you to specify information about the <Databricks> resources used by the bundle in the `resources` mapping in the bundle configuration. See [resources mapping](/dev-tools/bundles/settings.md#resources) and [resources key reference](/dev-tools/bundles/reference.md#resources).
This article outlines supported resource types for bundles and provides details and an example for each supported type. For additional examples, see [_](/dev-tools/bundles/resource-examples.md).
## <a id="resource-types"></a> Supported resources
The following table lists supported resource types for bundles. Some resources can be created by defining them in a bundle and deploying the bundle, and some resources only support referencing an existing resource to include in the bundle.
Resources are defined using the corresponding [Databricks REST API](/api/workspace/introduction) object's create operation request payload, where the object's supported fields, expressed as YAML, are the resource's supported properties. Links to documentation for each resource's corresponding payloads are listed in the table.
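For example, a minimal job resource can be sketched as follows (names and values are illustrative, not taken from a real workspace):
```yaml
resources:
  jobs:
    hello_job:
      name: hello-job
      tasks:
        - task_key: hello-task
          notebook_task:
            notebook_path: ./src/hello_notebook.ipynb
```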
.. tip:: The `databricks bundle validate` command returns warnings if unknown resource properties are found in bundle configuration files.
.. list-table::
:header-rows: 1
* - Resource
- Create support
- Corresponding REST API object
* - [cluster](#cluster)
- ✓
- [Cluster object](/api/workspace/clusters/create)
* - [dashboard](#dashboard)
-
- [Dashboard object](/api/workspace/lakeview/create)
* - [experiment](#experiment)
- ✓
- [Experiment object](/api/workspace/experiments/createexperiment)
* - [job](#job)
- ✓
- [Job object](/api/workspace/jobs/create)
* - [model (legacy)](#model-legacy)
- ✓
- [Model (legacy) object](/api/workspace/modelregistry/createmodel)
* - [model_serving_endpoint](#model-serving-endpoint)
- ✓
- [Model serving endpoint object](/api/workspace/servingendpoints/create)
* - [pipeline](#pipeline)
- ✓
- [Pipeline object](/api/workspace/pipelines/create)
* - [quality_monitor](#quality-monitor)
- ✓
- [Quality monitor object](/api/workspace/qualitymonitors/create)
* - [registered_model](#registered-model) (<UC>)
- ✓
- [Registered model object](/api/workspace/registeredmodels/create)
* - [schema](#schema) (<UC>)
- ✓
- [Schema object](/api/workspace/schemas/create)
* - [volume](#volume) (<UC>)
- ✓
- [Volume object](/api/workspace/volumes/create)

View File

@ -0,0 +1,12 @@
package annotation
type Descriptor struct {
Description string `json:"description,omitempty"`
MarkdownDescription string `json:"markdown_description,omitempty"`
Title string `json:"title,omitempty"`
Default any `json:"default,omitempty"`
Enum []any `json:"enum,omitempty"`
MarkdownExamples string `json:"markdown_examples,omitempty"`
}
const Placeholder = "PLACEHOLDER"

View File

@ -0,0 +1,44 @@
package annotation
import (
"bytes"
"os"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
)
// Parsed file with annotations, expected format:
// github.com/databricks/cli/bundle/config.Bundle:
//
// cluster_id:
// description: "Description"
type File map[string]map[string]Descriptor
func LoadAndMerge(sources []string) (File, error) {
prev := dyn.NilValue
for _, path := range sources {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
generated, err := yamlloader.LoadYAML(path, bytes.NewBuffer(b))
if err != nil {
return nil, err
}
prev, err = merge.Merge(prev, generated)
if err != nil {
return nil, err
}
}
var data File
err := convert.ToTyped(&data, prev)
if err != nil {
return nil, err
}
return data, nil
}

View File

@ -11,6 +11,7 @@ import (
yaml3 "gopkg.in/yaml.v3"
"github.com/databricks/cli/bundle/internal/annotation"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
@ -19,60 +20,23 @@ import (
"github.com/databricks/cli/libs/jsonschema"
)
type annotation struct {
Description string `json:"description,omitempty"`
MarkdownDescription string `json:"markdown_description,omitempty"`
Title string `json:"title,omitempty"`
Default any `json:"default,omitempty"`
Enum []any `json:"enum,omitempty"`
}
type annotationHandler struct {
// Annotations read from all annotation files including all overrides
parsedAnnotations annotationFile
parsedAnnotations annotation.File
// Missing annotations for fields that are found in config that need to be added to the annotation file
missingAnnotations annotationFile
missingAnnotations annotation.File
}
/**
* Parsed file with annotations, expected format:
* github.com/databricks/cli/bundle/config.Bundle:
* cluster_id:
* description: "Description"
*/
type annotationFile map[string]map[string]annotation
const Placeholder = "PLACEHOLDER"
// Adds annotations to the JSON schema reading from the annotation files.
// More details https://json-schema.org/understanding-json-schema/reference/annotations
func newAnnotationHandler(sources []string) (*annotationHandler, error) {
prev := dyn.NilValue
for _, path := range sources {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
generated, err := yamlloader.LoadYAML(path, bytes.NewBuffer(b))
if err != nil {
return nil, err
}
prev, err = merge.Merge(prev, generated)
if err != nil {
return nil, err
}
}
var data annotationFile
err := convert.ToTyped(&data, prev)
data, err := annotation.LoadAndMerge(sources)
if err != nil {
return nil, err
}
d := &annotationHandler{}
d.parsedAnnotations = data
d.missingAnnotations = annotationFile{}
d.missingAnnotations = annotation.File{}
return d, nil
}
@ -85,7 +49,7 @@ func (d *annotationHandler) addAnnotations(typ reflect.Type, s jsonschema.Schema
annotations := d.parsedAnnotations[refPath]
if annotations == nil {
annotations = map[string]annotation{}
annotations = map[string]annotation.Descriptor{}
}
rootTypeAnnotation, ok := annotations[RootTypeKey]
@ -96,11 +60,11 @@ func (d *annotationHandler) addAnnotations(typ reflect.Type, s jsonschema.Schema
for k, v := range s.Properties {
item := annotations[k]
if item.Description == "" {
item.Description = Placeholder
item.Description = annotation.Placeholder
emptyAnnotations := d.missingAnnotations[refPath]
if emptyAnnotations == nil {
emptyAnnotations = map[string]annotation{}
emptyAnnotations = map[string]annotation.Descriptor{}
d.missingAnnotations[refPath] = emptyAnnotations
}
emptyAnnotations[k] = item
@ -124,7 +88,7 @@ func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error
for k := range d.missingAnnotations {
if !isCliPath(k) {
delete(d.missingAnnotations, k)
fmt.Printf("Missing annotations for `%s` that are not in CLI package, try to fetch latest OpenAPI spec and regenerate annotations", k)
fmt.Printf("Missing annotations for `%s` that are not in CLI package, try to fetch latest OpenAPI spec and regenerate annotations\n", k)
}
}
@ -138,7 +102,7 @@ func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error
return err
}
var outputTyped annotationFile
var outputTyped annotation.File
err = convert.ToTyped(&outputTyped, output)
if err != nil {
return err
@ -155,8 +119,8 @@ func getPath(typ reflect.Type) string {
return typ.PkgPath() + "." + typ.Name()
}
func assignAnnotation(s *jsonschema.Schema, a annotation) {
if a.Description != Placeholder {
func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
if a.Description != annotation.Placeholder {
s.Description = a.Description
}
@ -168,7 +132,7 @@ func assignAnnotation(s *jsonschema.Schema, a annotation) {
s.Enum = a.Enum
}
func saveYamlWithStyle(outputPath string, annotations annotationFile) error {
func saveYamlWithStyle(outputPath string, annotations annotation.File) error {
annotationOrder := yamlsaver.NewOrder([]string{"description", "markdown_description", "title", "default", "enum"})
style := map[string]yaml3.Style{}
@ -220,15 +184,17 @@ func convertLinksToAbsoluteUrl(s string) string {
referencePage := "/dev-tools/bundles/reference.html"
// Regular expression to match Markdown-style links like [_](link)
re := regexp.MustCompile(`\[_\]\(([^)]+)\)`)
re := regexp.MustCompile(`\[(.*?)\]\((.*?)\)`)
result := re.ReplaceAllStringFunc(s, func(match string) string {
matches := re.FindStringSubmatch(match)
if len(matches) < 2 {
return match
}
link := matches[1]
var text, absoluteURL string
originalText := matches[1]
link := matches[2]
var text, absoluteURL string
if strings.HasPrefix(link, "#") {
text = strings.TrimPrefix(link, "#")
absoluteURL = fmt.Sprintf("%s%s%s", base, referencePage, link)
@ -246,6 +212,10 @@ func convertLinksToAbsoluteUrl(s string) string {
return match
}
if originalText != "_" {
text = originalText
}
return fmt.Sprintf("[%s](%s)", text, absoluteURL)
})

View File

@ -1,31 +1,25 @@
github.com/databricks/cli/bundle/config.Artifact:
"build":
"description": |-
An optional set of non-default build commands that you want to run locally before deployment.
For Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment.
To specify multiple build commands, separate each command with double-ampersand (&&) characters.
An optional set of non-default build commands to run locally before deployment.
"executable":
"description": |-
The executable type.
The executable type. Valid values are `bash`, `sh`, and `cmd`.
"files":
"description": |-
The source files for the artifact.
"markdown_description": |-
The source files for the artifact, defined as an [_](#artifact_file).
"path":
"description": |-
The location where the built artifact will be saved.
"type":
"description": |-
The type of the artifact.
Required. The type of the artifact.
"markdown_description": |-
The type of the artifact. Valid values are `wheel` or `jar`
Required. The type of the artifact. Valid values are `whl`.
github.com/databricks/cli/bundle/config.ArtifactFile:
"source":
"description": |-
The path of the files used to build the artifact.
Required. The path of the files used to build the artifact.
github.com/databricks/cli/bundle/config.Bundle:
"cluster_id":
"description": |-
@ -44,12 +38,12 @@ github.com/databricks/cli/bundle/config.Bundle:
"description": |-
The definition of the bundle deployment
"markdown_description": |-
The definition of the bundle deployment. For supported attributes, see [_](#deployment) and [_](/dev-tools/bundles/deployment-modes.md).
The definition of the bundle deployment. For supported attributes see [_](/dev-tools/bundles/deployment-modes.md).
"git":
"description": |-
The Git version control details that are associated with your bundle.
"markdown_description": |-
The Git version control details that are associated with your bundle. For supported attributes, see [_](#git) and [_](/dev-tools/bundles/settings.md#git).
The Git version control details that are associated with your bundle. For supported attributes see [_](/dev-tools/bundles/settings.md#git).
"name":
"description": |-
The name of the bundle.
@ -63,8 +57,6 @@ github.com/databricks/cli/bundle/config.Deployment:
"lock":
"description": |-
The deployment lock attributes.
"markdown_description": |-
The deployment lock attributes. See [_](#lock).
github.com/databricks/cli/bundle/config.Experimental:
"pydabs":
"description": |-
@ -74,13 +66,13 @@ github.com/databricks/cli/bundle/config.Experimental:
Configures loading of Python code defined with 'databricks-bundles' package.
"python_wheel_wrapper":
"description": |-
Whether to use a Python wheel wrapper
Whether to use a Python wheel wrapper.
"scripts":
"description": |-
The commands to run
The commands to run.
"use_legacy_run_as":
"description": |-
Whether to use the legacy run_as behavior
Whether to use the legacy run_as behavior.
github.com/databricks/cli/bundle/config.Git:
"branch":
"description": |-
@ -152,66 +144,80 @@ github.com/databricks/cli/bundle/config.Resources:
PLACEHOLDER
"clusters":
"description": |-
The cluster definitions for the bundle.
The cluster definitions for the bundle, where each key is the name of a cluster.
"markdown_description": |-
The cluster definitions for the bundle. See [_](/dev-tools/bundles/resources.md#cluster)
The cluster definitions for the bundle, where each key is the name of a cluster. See [_](/dev-tools/bundles/resources.md#clusters)
"dashboards":
"description": |-
The dashboard definitions for the bundle.
The dashboard definitions for the bundle, where each key is the name of the dashboard.
"markdown_description": |-
The dashboard definitions for the bundle. See [_](/dev-tools/bundles/resources.md#dashboard)
The dashboard definitions for the bundle, where each key is the name of the dashboard. See [_](/dev-tools/bundles/resources.md#dashboards)
"experiments":
"description": |-
The experiment definitions for the bundle.
The experiment definitions for the bundle, where each key is the name of the experiment.
"markdown_description": |-
The experiment definitions for the bundle. See [_](/dev-tools/bundles/resources.md#experiment)
The experiment definitions for the bundle, where each key is the name of the experiment. See [_](/dev-tools/bundles/resources.md#experiments)
"jobs":
"description": |-
The job definitions for the bundle.
The job definitions for the bundle, where each key is the name of the job.
"markdown_description": |-
The job definitions for the bundle. See [_](/dev-tools/bundles/resources.md#job)
The job definitions for the bundle, where each key is the name of the job. See [_](/dev-tools/bundles/resources.md#jobs)
"model_serving_endpoints":
"description": |-
The model serving endpoint definitions for the bundle.
The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint.
"markdown_description": |-
The model serving endpoint definitions for the bundle. See [_](/dev-tools/bundles/resources.md#model_serving_endpoint)
The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [_](/dev-tools/bundles/resources.md#model_serving_endpoints)
"models":
"description": |-
The model definitions for the bundle.
The model definitions for the bundle, where each key is the name of the model.
"markdown_description": |-
The model definitions for the bundle. See [_](/dev-tools/bundles/resources.md#model)
The model definitions for the bundle, where each key is the name of the model. See [_](/dev-tools/bundles/resources.md#models)
"pipelines":
"description": |-
The pipeline definitions for the bundle.
The pipeline definitions for the bundle, where each key is the name of the pipeline.
"markdown_description": |-
The pipeline definitions for the bundle. See [_](/dev-tools/bundles/resources.md#pipeline)
The pipeline definitions for the bundle, where each key is the name of the pipeline. See [_](/dev-tools/bundles/resources.md#pipelines)
"quality_monitors":
"description": |-
The quality monitor definitions for the bundle.
The quality monitor definitions for the bundle, where each key is the name of the quality monitor.
"markdown_description": |-
The quality monitor definitions for the bundle. See [_](/dev-tools/bundles/resources.md#quality_monitor)
The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [_](/dev-tools/bundles/resources.md#quality_monitors)
"registered_models":
"description": |-
The registered model definitions for the bundle.
The registered model definitions for the bundle, where each key is the name of the <UC> registered model.
"markdown_description": |-
The registered model definitions for the bundle. See [_](/dev-tools/bundles/resources.md#registered_model)
The registered model definitions for the bundle, where each key is the name of the <UC> registered model. See [_](/dev-tools/bundles/resources.md#registered_models)
"schemas":
"description": |-
The schema definitions for the bundle.
The schema definitions for the bundle, where each key is the name of the schema.
"markdown_description": |-
The schema definitions for the bundle. See [_](/dev-tools/bundles/resources.md#schema)
The schema definitions for the bundle, where each key is the name of the schema. See [_](/dev-tools/bundles/resources.md#schemas)
"volumes":
"description": |-
PLACEHOLDER
The volume definitions for the bundle, where each key is the name of the volume.
"markdown_description": |-
The volume definitions for the bundle, where each key is the name of the volume. See [_](/dev-tools/bundles/resources.md#volumes)
github.com/databricks/cli/bundle/config.Root:
"artifacts":
"description": |-
Defines the attributes to build an artifact
"markdown_description": |-
Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [_](/dev-tools/bundles/settings.md#artifacts).
Artifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [_](/dev-tools/bundles/artifact-overrides.md).
"markdown_examples": |-
```yaml
artifacts:
default:
type: whl
build: poetry build
path: .
```
"bundle":
"description": |-
The attributes of the bundle.
The bundle attributes when deploying to this target.
"markdown_description": |-
The attributes of the bundle. See [_](/dev-tools/bundles/settings.md#bundle)
The bundle attributes when deploying to this target.
"experimental":
"description": |-
Defines attributes for experimental features.
@ -222,9 +228,21 @@ github.com/databricks/cli/bundle/config.Root:
Specifies a list of path globs that contain configuration files to include within the bundle. See [_](/dev-tools/bundles/settings.md#include)
"permissions":
"description": |-
Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle
Defines a permission for a specific entity.
"markdown_description": |-
Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle. See [_](/dev-tools/bundles/settings.md#permissions) and [_](/dev-tools/bundles/permissions.md).
A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.
See [_](/dev-tools/bundles/settings.md#permissions) and [_](/dev-tools/bundles/permissions.md).
"markdown_examples": |-
```yaml
permissions:
- level: CAN_VIEW
group_name: test-group
- level: CAN_MANAGE
user_name: someone@example.com
- level: CAN_RUN
service_principal_name: 123456-abcdef
```
"presets":
"description": |-
Defines bundle deployment presets.
@ -232,26 +250,39 @@ github.com/databricks/cli/bundle/config.Root:
Defines bundle deployment presets. See [_](/dev-tools/bundles/deployment-modes.md#presets).
"resources":
"description": |-
Specifies information about the Databricks resources used by the bundle
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.
"markdown_description": |-
Specifies information about the Databricks resources used by the bundle. See [_](/dev-tools/bundles/resources.md).
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about <DABS> supported resources, and resource definition reference, see [_](/dev-tools/bundles/resources.md).
```yaml
resources:
<resource-type>:
<resource-name>:
<resource-field-name>: <resource-field-value>
```
"run_as":
"description": |-
The identity to use to run the bundle.
The identity to use when running <DABS> workflows.
"markdown_description": |-
The identity to use when running <DABS> workflows. See [_](/dev-tools/bundles/run-as.md).
"sync":
"description": |-
The files and file paths to include or exclude in the bundle.
"markdown_description": |-
The files and file paths to include or exclude in the bundle. See [_](/dev-tools/bundles/)
The files and file paths to include or exclude in the bundle. See [_](/dev-tools/bundles/settings.md#sync).
"targets":
"description": |-
Defines deployment targets for the bundle.
"markdown_description": |-
Defines deployment targets for the bundle. See [_](/dev-tools/bundles/settings.md#targets)
"variables":
"description": |-
A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.
"workspace":
"description": |-
Defines the Databricks workspace for the bundle.
"markdown_description": |-
Defines the Databricks workspace for the bundle. See [_](/dev-tools/bundles/settings.md#workspace).
github.com/databricks/cli/bundle/config.Sync:
"exclude":
"description": |-
@ -266,11 +297,9 @@ github.com/databricks/cli/bundle/config.Target:
"artifacts":
"description": |-
The artifacts to include in the target deployment.
"markdown_description": |-
The artifacts to include in the target deployment. See [_](#artifact)
"bundle":
"description": |-
The name of the bundle when deploying to this target.
The bundle attributes when deploying to this target.
"cluster_id":
"description": |-
The ID of the cluster to use for this target.
@ -283,8 +312,6 @@ github.com/databricks/cli/bundle/config.Target:
"git":
"description": |-
The Git version control settings for the target.
"markdown_description": |-
The Git version control settings for the target. See [_](#git).
"mode":
"description": |-
The deployment mode for the target.
@ -293,38 +320,26 @@ github.com/databricks/cli/bundle/config.Target:
"permissions":
"description": |-
The permissions for deploying and running the bundle in the target.
"markdown_description": |-
The permissions for deploying and running the bundle in the target. See [_](#permission).
"presets":
"description": |-
The deployment presets for the target.
"markdown_description": |-
The deployment presets for the target. See [_](#preset).
"resources":
"description": |-
The resource definitions for the target.
"markdown_description": |-
The resource definitions for the target. See [_](#resources).
"run_as":
"description": |-
The identity to use to run the bundle.
"markdown_description": |-
The identity to use to run the bundle. See [_](#job_run_as) and [_](/dev-tools/bundles/run_as.md).
The identity to use to run the bundle, see [_](/dev-tools/bundles/run-as.md).
"sync":
"description": |-
The local paths to sync to the target workspace when a bundle is run or deployed.
"markdown_description": |-
The local paths to sync to the target workspace when a bundle is run or deployed. See [_](#sync).
"variables":
"description": |-
The custom variable definitions for the target.
"markdown_description": |-
The custom variable definitions for the target. See [_](/dev-tools/bundles/settings.md#variables) and [_](/dev-tools/bundles/variables.md).
"workspace":
"description": |-
The Databricks workspace for the target.
"markdown_description": |-
The Databricks workspace for the target. [_](#workspace)
github.com/databricks/cli/bundle/config.Workspace:
"artifact_path":
"description": |-
@ -374,64 +389,6 @@ github.com/databricks/cli/bundle/config.Workspace:
"state_path":
"description": |-
The workspace state path
github.com/databricks/cli/bundle/config/resources.App:
"active_deployment":
"description": |-
PLACEHOLDER
"app_status":
"description": |-
PLACEHOLDER
"compute_status":
"description": |-
PLACEHOLDER
"config":
"description": |-
PLACEHOLDER
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"default_source_code_path":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"pending_deployment":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"resources":
"description": |-
PLACEHOLDER
"service_principal_client_id":
"description": |-
PLACEHOLDER
"service_principal_id":
"description": |-
PLACEHOLDER
"service_principal_name":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
"updater":
"description": |-
PLACEHOLDER
"url":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Grant:
"principal":
"description": |-
@ -440,6 +397,11 @@ github.com/databricks/cli/bundle/config/resources.Grant:
"description": |-
The privileges to grant to the specified entity
github.com/databricks/cli/bundle/config/resources.Permission:
"-":
"description": |-
Defines a permission for a specific entity.
"markdown_description": |-
Defines a permission for a specific entity. See [_](/dev-tools/bundles/settings.md#permissions) and [_](/dev-tools/bundles/permissions.md).
"group_name":
"description": |-
The name of the group that has the permission set in level.
@ -506,6 +468,11 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable:
"description": |-
The type of the variable.
github.com/databricks/cli/bundle/config/variable.Variable:
"_":
"description": |-
Defines a custom variable for the bundle.
"markdown_description": |-
Defines a custom variable for the bundle. See [_](/dev-tools/bundles/settings.md#variables).
"default":
"description": |-
PLACEHOLDER
@ -516,107 +483,14 @@ github.com/databricks/cli/bundle/config/variable.Variable:
"description": |-
The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
"markdown_description": |-
The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID."
The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.
"type":
"description": |-
The type of the variable.
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResource:
"description":
"description": |-
PLACEHOLDER
"job":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"secret":
"description": |-
PLACEHOLDER
"serving_endpoint":
"description": |-
PLACEHOLDER
"sql_warehouse":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"name":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs:
"service_principal_name":
"description": |-
PLACEHOLDER
The application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.
"user_name":
"description": |-
PLACEHOLDER
The email of an active workspace user. Non-admin users can only set this field to their own email.
View File
@ -1,4 +1,7 @@
github.com/databricks/cli/bundle/config/resources.App:
"active_deployment":
"description": |-
PLACEHOLDER
"app_status":
"description": |-
PLACEHOLDER
@ -8,9 +11,30 @@ github.com/databricks/cli/bundle/config/resources.App:
"config":
"description": |-
PLACEHOLDER
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"default_source_code_path":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"pending_deployment":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"resources":
"description": |-
PLACEHOLDER
"service_principal_client_id":
"description": |-
PLACEHOLDER
@ -23,7 +47,46 @@ github.com/databricks/cli/bundle/config/resources.App:
"source_code_path":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
"updater":
"description": |-
PLACEHOLDER
"url":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Cluster:
"_":
"markdown_description": |-
The cluster resource defines an [all-purpose cluster](/api/workspace/clusters/create).
"markdown_examples": |-
The following example creates a cluster named `my_cluster` and sets that as the cluster to use to run the notebook in `my_job`:
```yaml
bundle:
name: clusters
resources:
clusters:
my_cluster:
num_workers: 2
node_type_id: "i3.xlarge"
autoscale:
min_workers: 2
max_workers: 7
spark_version: "13.3.x-scala2.12"
spark_conf:
"spark.executor.memory": "2g"
jobs:
my_job:
tasks:
- task_key: test_task
notebook_task:
notebook_path: "./src/my_notebook.py"
```
"data_security_mode":
"description": |-
PLACEHOLDER
@ -43,6 +106,24 @@ github.com/databricks/cli/bundle/config/resources.Cluster:
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Dashboard:
"_":
"markdown_description": |-
The dashboard resource allows you to manage [AI/BI dashboards](/api/workspace/lakeview/create) in a bundle. For information about AI/BI dashboards, see [_](/dashboards/index.md).
"markdown_examples": |-
The following example includes and deploys the sample __NYC Taxi Trip Analysis__ dashboard to the Databricks workspace.
``` yaml
resources:
dashboards:
nyc_taxi_trip_analysis:
display_name: "NYC Taxi Trip Analysis"
file_path: ../src/nyc_taxi_trip_analysis.lvdash.json
warehouse_id: ${var.warehouse_id}
```
If you use the UI to modify the dashboard, modifications made through the UI are not applied to the dashboard JSON file in the local bundle unless you explicitly update it using `bundle generate`. You can use the `--watch` option to continuously poll and retrieve changes to the dashboard. See [_](/dev-tools/cli/bundle-commands.md#generate).
In addition, if you attempt to deploy a bundle that contains a dashboard JSON file that is different than the one in the remote workspace, an error will occur. To force the deploy and overwrite the dashboard in the remote workspace with the local one, use the `--force` option. See [_](/dev-tools/cli/bundle-commands.md#deploy).
"embed_credentials":
"description": |-
PLACEHOLDER
@ -53,6 +134,24 @@ github.com/databricks/cli/bundle/config/resources.Dashboard:
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Job:
"_":
"markdown_description": |-
The job resource allows you to define [jobs and their corresponding tasks](/api/workspace/jobs/create) in your bundle. For information about jobs, see [_](/jobs/index.md). For a tutorial that uses a <DABS> template to create a job, see [_](/dev-tools/bundles/jobs-tutorial.md).
"markdown_examples": |-
The following example defines a job with the resource key `hello-job` with one notebook task:
```yaml
resources:
jobs:
hello-job:
name: hello-job
tasks:
- task_key: hello-task
notebook_task:
notebook_path: ./hello.py
```
For information about defining job tasks and overriding job settings, see [_](/dev-tools/bundles/job-task-types.md), [_](/dev-tools/bundles/job-task-override.md), and [_](/dev-tools/bundles/cluster-override.md).
"health":
"description": |-
PLACEHOLDER
@ -63,30 +162,186 @@ github.com/databricks/cli/bundle/config/resources.Job:
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowExperiment:
"_":
"markdown_description": |-
The experiment resource allows you to define [MLflow experiments](/api/workspace/experiments/createexperiment) in a bundle. For information about MLflow experiments, see [_](/mlflow/experiments.md).
"markdown_examples": |-
The following example defines an experiment that all users can view:
```yaml
resources:
experiments:
experiment:
name: my_ml_experiment
permissions:
- level: CAN_READ
group_name: users
description: MLflow experiment used to track runs
```
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowModel:
"_":
"markdown_description": |-
The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use <UC> [registered models](#registered-model) instead.
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
"_":
"markdown_description": |-
The model_serving_endpoint resource allows you to define [model serving endpoints](/api/workspace/servingendpoints/create). See [_](/machine-learning/model-serving/manage-serving-endpoints.md).
"markdown_examples": |-
The following example defines a <UC> model serving endpoint:
```yaml
resources:
model_serving_endpoints:
uc_model_serving_endpoint:
name: "uc-model-endpoint"
config:
served_entities:
- entity_name: "myCatalog.mySchema.my-ads-model"
entity_version: "10"
workload_size: "Small"
scale_to_zero_enabled: "true"
traffic_config:
routes:
- served_model_name: "my-ads-model-10"
traffic_percentage: "100"
tags:
- key: "team"
value: "data science"
```
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Pipeline:
"_":
"markdown_description": |-
The pipeline resource allows you to create <DLT> [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/delta-live-tables/index.md). For a tutorial that uses the <DABS> template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
"markdown_examples": |-
The following example defines a pipeline with the resource key `hello-pipeline`:
```yaml
resources:
pipelines:
hello-pipeline:
name: hello-pipeline
clusters:
- label: default
num_workers: 1
development: true
continuous: false
channel: CURRENT
edition: CORE
photon: false
libraries:
- notebook:
path: ./pipeline.py
```
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.QualityMonitor:
"_":
"markdown_description": |-
The quality_monitor resource allows you to define a <UC> [table monitor](/api/workspace/qualitymonitors/create). For information about monitors, see [_](/machine-learning/model-serving/monitor-diagnose-endpoints.md).
"markdown_examples": |-
The following example defines a quality monitor:
```yaml
resources:
quality_monitors:
my_quality_monitor:
table_name: dev.mlops_schema.predictions
output_schema_name: ${bundle.target}.mlops_schema
assets_dir: /Users/${workspace.current_user.userName}/databricks_lakehouse_monitoring
inference_log:
granularities: [1 day]
model_id_col: model_id
prediction_col: prediction
label_col: price
problem_type: PROBLEM_TYPE_REGRESSION
timestamp_col: timestamp
schedule:
quartz_cron_expression: 0 0 8 * * ? # Run Every day at 8am
timezone_id: UTC
```
"table_name":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.RegisteredModel:
"_":
"markdown_description": |-
The registered model resource allows you to define models in <UC>. For information about <UC> [registered models](/api/workspace/registeredmodels/create), see [_](/machine-learning/manage-model-lifecycle/index.md).
"markdown_examples": |-
The following example defines a registered model in <UC>:
```yaml
resources:
registered_models:
model:
name: my_model
catalog_name: ${bundle.target}
schema_name: mlops_schema
comment: Registered model in Unity Catalog for ${bundle.target} deployment target
grants:
- privileges:
- EXECUTE
principal: account users
```
"grants":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Schema:
"_":
"markdown_description": |-
The schema resource type allows you to define <UC> [schemas](/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:
- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.
- Only fields supported by the corresponding [Schemas object create API](/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](/api/workspace/schemas/update).
"markdown_examples": |-
The following example defines a pipeline with the resource key `my_pipeline` that creates a <UC> schema with the key `my_schema` as the target:
```yaml
resources:
pipelines:
my_pipeline:
name: test-pipeline-{{.unique_id}}
libraries:
- notebook:
path: ./nb.sql
development: true
catalog: main
target: ${resources.schemas.my_schema.id}
schemas:
my_schema:
name: test-schema-{{.unique_id}}
catalog_name: main
comment: This schema was created by DABs.
```
A top-level grants mapping is not supported by <DABS>, so if you want to set grants for a schema, define the grants for the schema within the `schemas` mapping. For more information about grants, see [_](/data-governance/unity-catalog/manage-privileges/index.md#grant).
The following example defines a <UC> schema with grants:
```yaml
resources:
schemas:
my_schema:
name: test-schema
grants:
- principal: users
privileges:
- CAN_MANAGE
- principal: my_team
privileges:
- CAN_READ
catalog_name: main
```
"grants":
"description": |-
PLACEHOLDER
@ -94,6 +349,27 @@ github.com/databricks/cli/bundle/config/resources.Schema:
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Volume:
"_":
"markdown_description": |-
The volume resource type allows you to define and create <UC> [volumes](/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:
- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use <DABS> to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.
- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [_](/dev-tools/bundles/deployment-modes.md#custom-presets).
"markdown_examples": |-
The following example creates a <UC> volume with the key `my_volume`:
```yaml
resources:
volumes:
my_volume:
catalog_name: main
name: my_volume
schema_name: my_schema
```
For an example bundle that runs a job that writes to a file in <UC> volume, see the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples/tree/main/knowledge_base/write_from_job_to_volume).
"grants":
"description": |-
PLACEHOLDER
@ -197,6 +473,85 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
"manual":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"name":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state":
github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
"entity_version":
"description": |-
View File
@ -33,6 +33,10 @@ func TestConvertLinksToAbsoluteUrl(t *testing.T) {
input: "This is a link to [external](https://external.com)",
expected: "This is a link to [external](https://external.com)",
},
{
input: "This is a link to [one](/relative), [two](/relative-2)",
expected: "This is a link to [one](https://docs.databricks.com/relative), [two](https://docs.databricks.com/relative-2)",
},
}
for _, test := range tests {
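The new test case above exercises conversion of multiple site-relative links in one string. As a rough sketch of the technique under test (an assumed implementation, not the diff's own code; the real `convertLinksToAbsoluteUrl` in the docsgen package may also rewrite `.md` suffixes to `.html`, as the generated descriptions later in this diff suggest):
```go
package main

import (
	"fmt"
	"regexp"
)

// linkRe captures markdown link targets that start with "/" (site-relative).
var linkRe = regexp.MustCompile(`\]\((/[^)]*)\)`)

// convertLinksToAbsoluteUrl prefixes every site-relative link target with the
// docs host, leaving absolute links (e.g. https://external.com) untouched.
func convertLinksToAbsoluteUrl(s string) string {
	return linkRe.ReplaceAllString(s, "](https://docs.databricks.com$1)")
}

func main() {
	fmt.Println(convertLinksToAbsoluteUrl("This is a link to [one](/relative), [two](/relative-2)"))
	// Prints: This is a link to [one](https://docs.databricks.com/relative), [two](https://docs.databricks.com/relative-2)
}
```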
View File
@ -10,6 +10,7 @@ import (
"testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/internal/annotation"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
@ -113,13 +114,13 @@ func TestNoDetachedAnnotations(t *testing.T) {
assert.Empty(t, types, "Detached annotations found, regenerate schema and check for package path changes")
}
func getAnnotations(path string) (annotationFile, error) {
func getAnnotations(path string) (annotation.File, error) {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
var data annotationFile
var data annotation.File
err = yaml.Unmarshal(b, &data)
return data, err
}
View File
@ -8,6 +8,7 @@ import (
"reflect"
"strings"
"github.com/databricks/cli/bundle/internal/annotation"
"github.com/databricks/cli/libs/jsonschema"
"gopkg.in/yaml.v3"
)
@ -114,8 +115,8 @@ func mapIncorrectTypNames(ref string) string {
// Use the OpenAPI spec to load descriptions for the given type.
func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overridesPath string) error {
annotations := annotationFile{}
overrides := annotationFile{}
annotations := annotation.File{}
overrides := annotation.File{}
b, err := os.ReadFile(overridesPath)
if err != nil {
@ -126,7 +127,7 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid
return err
}
if overrides == nil {
overrides = annotationFile{}
overrides = annotation.File{}
}
_, err = jsonschema.FromType(typ, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
@ -137,16 +138,16 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid
}
basePath := getPath(typ)
pkg := map[string]annotation{}
pkg := map[string]annotation.Descriptor{}
annotations[basePath] = pkg
if ref.Description != "" || ref.Enum != nil {
pkg[RootTypeKey] = annotation{Description: ref.Description, Enum: ref.Enum}
pkg[RootTypeKey] = annotation.Descriptor{Description: ref.Description, Enum: ref.Enum}
}
for k := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
pkg[k] = annotation{Description: refProp.Description, Enum: refProp.Enum}
pkg[k] = annotation.Descriptor{Description: refProp.Description, Enum: refProp.Enum}
if refProp.Description == "" {
addEmptyOverride(k, basePath, overrides)
}
@ -195,22 +196,22 @@ func prependCommentToFile(outputPath, comment string) error {
return err
}
func addEmptyOverride(key, pkg string, overridesFile annotationFile) {
func addEmptyOverride(key, pkg string, overridesFile annotation.File) {
if overridesFile[pkg] == nil {
overridesFile[pkg] = map[string]annotation{}
overridesFile[pkg] = map[string]annotation.Descriptor{}
}
overrides := overridesFile[pkg]
if overrides[key].Description == "" {
overrides[key] = annotation{Description: Placeholder}
overrides[key] = annotation.Descriptor{Description: annotation.Placeholder}
}
a, ok := overrides[key]
if !ok {
a = annotation{}
a = annotation.Descriptor{}
}
if a.Description == "" {
a.Description = Placeholder
a.Description = annotation.Placeholder
}
overrides[key] = a
}
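The refactor above replaces the package-local `annotationFile`/`annotation` types with a shared `annotation` package. A minimal sketch of that package's apparent shape, inferred only from its usage in this diff (field names and YAML tags are assumptions; the real definitions in `bundle/internal/annotation` may differ):
```go
package annotation

// Placeholder marks fields that still need a real description.
const Placeholder = "PLACEHOLDER"

// Descriptor holds the documentation attached to a single field of a config type.
type Descriptor struct {
	Description         string `yaml:"description,omitempty"`
	MarkdownDescription string `yaml:"markdown_description,omitempty"`
	MarkdownExamples    string `yaml:"markdown_examples,omitempty"`
	Enum                []any  `yaml:"enum,omitempty"`
}

// File maps a fully qualified Go type path (for example
// "github.com/databricks/cli/bundle/config.Bundle") to the descriptors of its fields.
type File map[string]map[string]Descriptor
```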
View File
@ -258,7 +258,8 @@
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The cluster resource defines an [all-purpose cluster](https://docs.databricks.com/api/workspace/clusters/create)."
},
{
"type": "string",
@ -321,7 +322,8 @@
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The dashboard resource allows you to manage [AI/BI dashboards](https://docs.databricks.com/api/workspace/lakeview/create) in a bundle. For information about AI/BI dashboards, see [link](https://docs.databricks.com/dashboards/index.html)."
},
{
"type": "string",
@ -442,7 +444,8 @@
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a \u003cDABS\u003e template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
},
{
"type": "string",
@ -487,7 +490,8 @@
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The experiment resource allows you to define [MLflow experiments](https://docs.databricks.com/api/workspace/experiments/createexperiment) in a bundle. For information about MLflow experiments, see [link](https://docs.databricks.com/mlflow/experiments.html)."
},
{
"type": "string",
@ -532,7 +536,8 @@
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use \u003cUC\u003e [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
},
{
"type": "string",
@ -576,7 +581,8 @@
"additionalProperties": false,
"required": [
"name"
]
],
"markdownDescription": "The model_serving_endpoint resource allows you to define [model serving endpoints](https://docs.databricks.com/api/workspace/servingendpoints/create). See [link](https://docs.databricks.com/machine-learning/model-serving/manage-serving-endpoints.html)."
},
{
"type": "string",
@ -718,7 +724,8 @@
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The pipeline resource allows you to create \u003cDLT\u003e [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/delta-live-tables/index.html). For a tutorial that uses the \u003cDABS\u003e template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
},
{
"type": "string",
@ -792,7 +799,8 @@
"table_name",
"assets_dir",
"output_schema_name"
]
],
"markdownDescription": "The quality_monitor resource allows you to define a \u003cUC\u003e [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
},
{
"type": "string",
@ -834,7 +842,8 @@
"catalog_name",
"name",
"schema_name"
]
],
"markdownDescription": "The registered model resource allows you to define models in \u003cUC\u003e. For information about \u003cUC\u003e [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
},
{
"type": "string",
@ -874,7 +883,8 @@
"required": [
"catalog_name",
"name"
]
],
"markdownDescription": "The schema resource type allows you to define \u003cUC\u003e [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
},
{
"type": "string",
@ -919,7 +929,8 @@
"catalog_name",
"name",
"schema_name"
]
],
"markdownDescription": "The volume resource type allows you to define and create \u003cUC\u003e [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use \u003cDABS\u003e to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
},
{
"type": "string",
@ -1005,6 +1016,7 @@
},
"variable.Variable": {
"type": "object",
"description": "Defines a custom variable for the bundle.",
"properties": {
"default": {
"$ref": "#/$defs/interface"
@ -1016,14 +1028,15 @@
"lookup": {
"description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup",
"markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.\""
"markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID."
},
"type": {
"description": "The type of the variable.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "Defines a custom variable for the bundle. See [variables](https://docs.databricks.com/dev-tools/bundles/settings.html#variables)."
},
"variable.VariableType": {
"type": "string"
@ -1035,26 +1048,25 @@
"type": "object",
"properties": {
"build": {
"description": "An optional set of non-default build commands that you want to run locally before deployment.\n\nFor Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment.\n\nTo specify multiple build commands, separate each command with double-ampersand (\u0026\u0026) characters.",
"description": "An optional set of non-default build commands to run locally before deployment.",
"$ref": "#/$defs/string"
},
"executable": {
"description": "The executable type.",
"description": "The executable type. Valid values are `bash`, `sh`, and `cmd`.",
"$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType"
},
"files": {
"description": "The source files for the artifact.",
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile",
"markdownDescription": "The source files for the artifact, defined as an [artifact_file](https://docs.databricks.com/dev-tools/bundles/reference.html#artifact_file)."
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile"
},
"path": {
"description": "The location where the built artifact will be saved.",
"$ref": "#/$defs/string"
},
"type": {
"description": "The type of the artifact.",
"description": "Required. The type of the artifact.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType",
"markdownDescription": "The type of the artifact. Valid values are `wheel` or `jar`"
"markdownDescription": "Required. The type of the artifact. Valid values are `whl`."
}
},
"additionalProperties": false,
@ -1074,7 +1086,7 @@
"type": "object",
"properties": {
"source": {
"description": "The path of the files used to build the artifact.",
"description": "Required. The path of the files used to build the artifact.",
"$ref": "#/$defs/string"
}
},
@ -1113,12 +1125,12 @@
"deployment": {
"description": "The definition of the bundle deployment",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment",
"markdownDescription": "The definition of the bundle deployment. For supported attributes, see [deployment](https://docs.databricks.com/dev-tools/bundles/reference.html#deployment) and [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html)."
"markdownDescription": "The definition of the bundle deployment. For supported attributes see [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html)."
},
"git": {
"description": "The Git version control details that are associated with your bundle.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git",
"markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes, see [git](https://docs.databricks.com/dev-tools/bundles/reference.html#git) and [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)."
"markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes see [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)."
},
"name": {
"description": "The name of the bundle.",
@ -1154,8 +1166,7 @@
},
"lock": {
"description": "The deployment lock attributes.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock",
"markdownDescription": "The deployment lock attributes. See [lock](https://docs.databricks.com/dev-tools/bundles/reference.html#lock)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock"
}
},
"additionalProperties": false
@ -1180,15 +1191,15 @@
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python"
},
"python_wheel_wrapper": {
"description": "Whether to use a Python wheel wrapper",
"description": "Whether to use a Python wheel wrapper.",
"$ref": "#/$defs/bool"
},
"scripts": {
"description": "The commands to run",
"description": "The commands to run.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command"
},
"use_legacy_run_as": {
"description": "Whether to use the legacy run_as behavior",
"description": "Whether to use the legacy run_as behavior.",
"$ref": "#/$defs/bool"
}
},
@ -1352,57 +1363,59 @@
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App"
},
"clusters": {
"description": "The cluster definitions for the bundle.",
"description": "The cluster definitions for the bundle, where each key is the name of a cluster.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster",
"markdownDescription": "The cluster definitions for the bundle. See [cluster](https://docs.databricks.com/dev-tools/bundles/resources.html#cluster)"
"markdownDescription": "The cluster definitions for the bundle, where each key is the name of a cluster. See [clusters](https://docs.databricks.com/dev-tools/bundles/resources.html#clusters)"
},
"dashboards": {
"description": "The dashboard definitions for the bundle.",
"description": "The dashboard definitions for the bundle, where each key is the name of the dashboard.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Dashboard",
"markdownDescription": "The dashboard definitions for the bundle. See [dashboard](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboard)"
"markdownDescription": "The dashboard definitions for the bundle, where each key is the name of the dashboard. See [dashboards](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboards)"
},
"experiments": {
"description": "The experiment definitions for the bundle.",
"description": "The experiment definitions for the bundle, where each key is the name of the experiment.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment",
"markdownDescription": "The experiment definitions for the bundle. See [experiment](https://docs.databricks.com/dev-tools/bundles/resources.html#experiment)"
"markdownDescription": "The experiment definitions for the bundle, where each key is the name of the experiment. See [experiments](https://docs.databricks.com/dev-tools/bundles/resources.html#experiments)"
},
"jobs": {
"description": "The job definitions for the bundle.",
"description": "The job definitions for the bundle, where each key is the name of the job.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job",
"markdownDescription": "The job definitions for the bundle. See [job](https://docs.databricks.com/dev-tools/bundles/resources.html#job)"
"markdownDescription": "The job definitions for the bundle, where each key is the name of the job. See [jobs](https://docs.databricks.com/dev-tools/bundles/resources.html#jobs)"
},
"model_serving_endpoints": {
"description": "The model serving endpoint definitions for the bundle.",
"description": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint",
"markdownDescription": "The model serving endpoint definitions for the bundle. See [model_serving_endpoint](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoint)"
"markdownDescription": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [model_serving_endpoints](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoints)"
},
"models": {
"description": "The model definitions for the bundle.",
"description": "The model definitions for the bundle, where each key is the name of the model.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel",
"markdownDescription": "The model definitions for the bundle. See [model](https://docs.databricks.com/dev-tools/bundles/resources.html#model)"
"markdownDescription": "The model definitions for the bundle, where each key is the name of the model. See [models](https://docs.databricks.com/dev-tools/bundles/resources.html#models)"
},
"pipelines": {
"description": "The pipeline definitions for the bundle.",
"description": "The pipeline definitions for the bundle, where each key is the name of the pipeline.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline",
"markdownDescription": "The pipeline definitions for the bundle. See [pipeline](https://docs.databricks.com/dev-tools/bundles/resources.html#pipeline)"
"markdownDescription": "The pipeline definitions for the bundle, where each key is the name of the pipeline. See [pipelines](https://docs.databricks.com/dev-tools/bundles/resources.html#pipelines)"
},
"quality_monitors": {
"description": "The quality monitor definitions for the bundle.",
"description": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor",
"markdownDescription": "The quality monitor definitions for the bundle. See [quality_monitor](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitor)"
"markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors)"
},
"registered_models": {
"description": "The registered model definitions for the bundle.",
"description": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel",
"markdownDescription": "The registered model definitions for the bundle. See [registered_model](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_model)"
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)"
},
"schemas": {
"description": "The schema definitions for the bundle.",
"description": "The schema definitions for the bundle, where each key is the name of the schema.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema",
"markdownDescription": "The schema definitions for the bundle. See [schema](https://docs.databricks.com/dev-tools/bundles/resources.html#schema)"
"markdownDescription": "The schema definitions for the bundle, where each key is the name of the schema. See [schemas](https://docs.databricks.com/dev-tools/bundles/resources.html#schemas)"
},
"volumes": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume"
"description": "The volume definitions for the bundle, where each key is the name of the volume.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume",
"markdownDescription": "The volume definitions for the bundle, where each key is the name of the volume. See [volumes](https://docs.databricks.com/dev-tools/bundles/resources.html#volumes)"
}
},
"additionalProperties": false
@ -1446,11 +1459,10 @@
"properties": {
"artifacts": {
"description": "The artifacts to include in the target deployment.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact",
"markdownDescription": "The artifacts to include in the target deployment. See [artifact](https://docs.databricks.com/dev-tools/bundles/reference.html#artifact)"
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact"
},
"bundle": {
"description": "The name of the bundle when deploying to this target.",
"description": "The bundle attributes when deploying to this target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle"
},
"cluster_id": {
@ -1467,8 +1479,7 @@
},
"git": {
"description": "The Git version control settings for the target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git",
"markdownDescription": "The Git version control settings for the target. See [git](https://docs.databricks.com/dev-tools/bundles/reference.html#git)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git"
},
"mode": {
"description": "The deployment mode for the target.",
@ -1477,38 +1488,32 @@
},
"permissions": {
"description": "The permissions for deploying and running the bundle in the target.",
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission",
"markdownDescription": "The permissions for deploying and running the bundle in the target. See [permission](https://docs.databricks.com/dev-tools/bundles/reference.html#permission)."
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"presets": {
"description": "The deployment presets for the target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets",
"markdownDescription": "The deployment presets for the target. See [preset](https://docs.databricks.com/dev-tools/bundles/reference.html#preset)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets"
},
"resources": {
"description": "The resource definitions for the target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
"markdownDescription": "The resource definitions for the target. See [resources](https://docs.databricks.com/dev-tools/bundles/reference.html#resources)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources"
},
"run_as": {
"description": "The identity to use to run the bundle.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
"markdownDescription": "The identity to use to run the bundle. See [job_run_as](https://docs.databricks.com/dev-tools/bundles/reference.html#job_run_as) and [link](https://docs.databricks.com/dev-tools/bundles/run_as.html)."
"markdownDescription": "The identity to use to run the bundle, see [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
},
"sync": {
"description": "The local paths to sync to the target workspace when a bundle is run or deployed.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync",
"markdownDescription": "The local paths to sync to the target workspace when a bundle is run or deployed. See [sync](https://docs.databricks.com/dev-tools/bundles/reference.html#sync)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync"
},
"variables": {
"description": "The custom variable definitions for the target.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable",
"markdownDescription": "The custom variable definitions for the target. See [variables](https://docs.databricks.com/dev-tools/bundles/settings.html#variables) and [link](https://docs.databricks.com/dev-tools/bundles/variables.html)."
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable"
},
"workspace": {
"description": "The Databricks workspace for the target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace",
"markdownDescription": "The Databricks workspace for the target. [workspace](https://docs.databricks.com/dev-tools/bundles/reference.html#workspace)"
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace"
}
},
"additionalProperties": false
@ -1719,12 +1724,14 @@
"type": "object",
"properties": {
"description": {
"description": "Description of the App Resource.",
"$ref": "#/$defs/string"
},
"job": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob"
},
"name": {
"description": "Name of the App Resource.",
"$ref": "#/$defs/string"
},
"secret": {
@ -1980,6 +1987,7 @@
"$ref": "#/$defs/string"
},
"state": {
"description": "State of the app compute.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ComputeState"
}
},
@ -3701,7 +3709,7 @@
"description": "Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job.\n\nEither `user_name` or `service_principal_name` should be specified. If not, an error is thrown.",
"properties": {
"service_principal_name": {
"description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.",
"description": "The application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.",
"$ref": "#/$defs/string"
},
"user_name": {
@ -7227,12 +7235,13 @@
"properties": {
"artifacts": {
"description": "Defines the attributes to build an artifact",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact"
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact",
"markdownDescription": "Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [artifacts](https://docs.databricks.com/dev-tools/bundles/settings.html#artifacts).\n\nArtifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [link](https://docs.databricks.com/dev-tools/bundles/artifact-overrides.html)."
},
"bundle": {
"description": "The attributes of the bundle.",
"description": "The bundle attributes when deploying to this target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle",
"markdownDescription": "The attributes of the bundle. See [bundle](https://docs.databricks.com/dev-tools/bundles/settings.html#bundle)"
"markdownDescription": "The bundle attributes when deploying to this target,"
},
"experimental": {
"description": "Defines attributes for experimental features.",
@ -7244,9 +7253,9 @@
"markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include)"
},
"permissions": {
"description": "Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle",
"description": "Defines a permission for a specific entity.",
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission",
"markdownDescription": "Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle. See [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html)."
"markdownDescription": "A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.\n\nSee [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html)."
},
"presets": {
"description": "Defines bundle deployment presets.",
@ -7254,22 +7263,24 @@
"markdownDescription": "Defines bundle deployment presets. See [presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#presets)."
},
"resources": {
"description": "Specifies information about the Databricks resources used by the bundle",
"description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
"markdownDescription": "Specifies information about the Databricks resources used by the bundle. See [link](https://docs.databricks.com/dev-tools/bundles/resources.html)."
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about \u003cDABS\u003e supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
},
"run_as": {
"description": "The identity to use to run the bundle.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs"
"description": "The identity to use when running \u003cDABS\u003e workflows.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
"markdownDescription": "The identity to use when running \u003cDABS\u003e workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
},
"sync": {
"description": "The files and file paths to include or exclude in the bundle.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync",
"markdownDescription": "The files and file paths to include or exclude in the bundle. See [link](https://docs.databricks.com/dev-tools/bundles/)"
"markdownDescription": "The files and file paths to include or exclude in the bundle. See [sync](https://docs.databricks.com/dev-tools/bundles/settings.html#sync)."
},
"targets": {
"description": "Defines deployment targets for the bundle.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target"
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target",
"markdownDescription": "Defines deployment targets for the bundle. See [targets](https://docs.databricks.com/dev-tools/bundles/settings.html#targets)"
},
"variables": {
"description": "A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.",
@ -7277,7 +7288,8 @@
},
"workspace": {
"description": "Defines the Databricks workspace for the bundle.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace"
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace",
"markdownDescription": "Defines the Databricks workspace for the bundle. See [workspace](https://docs.databricks.com/dev-tools/bundles/settings.html#workspace)."
}
},
"additionalProperties": {}
View File
@ -13,7 +13,6 @@ import (
func TestGitAutoLoadWithEnvironment(t *testing.T) {
b := load(t, "./environments_autoload_git")
bundle.Apply(context.Background(), b, mutator.LoadGitDetails())
assert.True(t, b.Config.Bundle.Git.Inferred)
validUrl := strings.Contains(b.Config.Bundle.Git.OriginURL, "/cli") || strings.Contains(b.Config.Bundle.Git.OriginURL, "/bricks")
assert.True(t, validUrl, "Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL)
}
@ -21,7 +20,6 @@ func TestGitAutoLoadWithEnvironment(t *testing.T) {
func TestGitManuallySetBranchWithEnvironment(t *testing.T) {
b := loadTarget(t, "./environments_autoload_git", "production")
bundle.Apply(context.Background(), b, mutator.LoadGitDetails())
assert.False(t, b.Config.Bundle.Git.Inferred)
assert.Equal(t, "main", b.Config.Bundle.Git.Branch)
validUrl := strings.Contains(b.Config.Bundle.Git.OriginURL, "/cli") || strings.Contains(b.Config.Bundle.Git.OriginURL, "/bricks")
assert.True(t, validUrl, "Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL)
View File
@ -14,7 +14,6 @@ import (
func TestGitAutoLoad(t *testing.T) {
b := load(t, "./autoload_git")
bundle.Apply(context.Background(), b, mutator.LoadGitDetails())
assert.True(t, b.Config.Bundle.Git.Inferred)
validUrl := strings.Contains(b.Config.Bundle.Git.OriginURL, "/cli") || strings.Contains(b.Config.Bundle.Git.OriginURL, "/bricks")
assert.True(t, validUrl, "Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL)
}
@ -22,7 +21,6 @@ func TestGitAutoLoad(t *testing.T) {
func TestGitManuallySetBranch(t *testing.T) {
b := loadTarget(t, "./autoload_git", "production")
bundle.Apply(context.Background(), b, mutator.LoadGitDetails())
assert.False(t, b.Config.Bundle.Git.Inferred)
assert.Equal(t, "main", b.Config.Bundle.Git.Branch)
validUrl := strings.Contains(b.Config.Bundle.Git.OriginURL, "/cli") || strings.Contains(b.Config.Bundle.Git.OriginURL, "/bricks")
assert.True(t, validUrl, "Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL)
@ -36,7 +34,6 @@ func TestGitBundleBranchValidation(t *testing.T) {
b := load(t, "./git_branch_validation")
bundle.Apply(context.Background(), b, mutator.LoadGitDetails())
assert.False(t, b.Config.Bundle.Git.Inferred)
assert.Equal(t, "feature-a", b.Config.Bundle.Git.Branch)
assert.Equal(t, "feature-b", b.Config.Bundle.Git.ActualBranch)
View File
@ -6,7 +6,9 @@ import (
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/testdiff"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)
@ -35,3 +37,40 @@ func TestBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {
// deploy empty bundle again
deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"})
}
func TestBasicBundleDeployWithDoubleUnderscoreVariables(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root := initTestTemplate(t, ctx, "basic_with_variables", map[string]any{
"unique_id": uniqueId,
"node_type_id": nodeTypeId,
"spark_version": defaultSparkVersion,
})
currentUser, err := wt.W.CurrentUser.Me(ctx)
require.NoError(t, err)
ctx, replacements := testdiff.WithReplacementsMap(ctx)
replacements.Set(uniqueId, "$UNIQUE_PRJ")
replacements.Set(currentUser.UserName, "$USERNAME")
t.Cleanup(func() {
destroyBundle(t, ctx, root)
})
testutil.Chdir(t, root)
testcli.AssertOutput(
t,
ctx,
[]string{"bundle", "validate"},
testutil.TestData("testdata/basic_with_variables/bundle_validate.txt"),
)
testcli.AssertOutput(
t,
ctx,
[]string{"bundle", "deploy", "--force-lock", "--auto-approve"},
testutil.TestData("testdata/basic_with_variables/bundle_deploy.txt"),
)
}
View File
@ -0,0 +1,21 @@
{
"properties": {
"unique_id": {
"type": "string",
"description": "Unique ID for job name"
},
"spark_version": {
"type": "string",
"description": "Spark version used for job cluster"
},
"node_type_id": {
"type": "string",
"description": "Node type id for job cluster"
},
"root_path": {
"type": "string",
"description": "Root path to deploy bundle to",
"default": ""
}
}
}
View File
@ -0,0 +1,32 @@
bundle:
name: basic
workspace:
{{ if .root_path }}
root_path: "{{.root_path}}/.bundle/{{.unique_id}}"
{{ else }}
root_path: "~/.bundle/{{.unique_id}}"
{{ end }}
variables:
task__key: # Note: the variable has double underscore
default: my_notebook_task
resources:
jobs:
foo__bar: # Note: the resource has double underscore to check that TF provider can use such names
name: test-job-basic-{{.unique_id}}
tasks:
- task_key: ${var.task__key}
new_cluster:
num_workers: 1
spark_version: "{{.spark_version}}"
node_type_id: "{{.node_type_id}}"
spark_python_task:
python_file: ./hello_world.py
foo:
name: test-job-basic-ref-{{.unique_id}}
tasks:
- task_key: job_task
run_job_task:
job_id: ${resources.jobs.foo__bar.id}

View File

@ -0,0 +1 @@
print("Hello World!")

View File

@ -0,0 +1,4 @@
Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ/files...
Deploying resources...
Updating deployment state...
Deployment complete!

View File

@ -0,0 +1,7 @@
Name: basic
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ
Validation OK!

View File

@ -7,8 +7,7 @@
"exec_path": "/tmp/.../terraform"
},
"git": {
"bundle_root_path": ".",
"inferred": true
"bundle_root_path": "."
},
"mode": "development",
"deployment": {

View File

@ -0,0 +1,65 @@
package telemetry
import (
"encoding/json"
"testing"
"time"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/libs/telemetry"
"github.com/databricks/cli/libs/telemetry/protos"
"github.com/databricks/databricks-sdk-go/client"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestTelemetryEndpoint(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
apiClient, err := client.New(w.Config)
require.NoError(t, err)
logs := []protos.FrontendLog{
{
FrontendLogEventID: uuid.New().String(),
Entry: protos.FrontendLogEntry{
DatabricksCliLog: protos.DatabricksCliLog{
CliTestEvent: &protos.CliTestEvent{Name: protos.DummyCliEnumValue1},
},
},
},
{
FrontendLogEventID: uuid.New().String(),
Entry: protos.FrontendLogEntry{
DatabricksCliLog: protos.DatabricksCliLog{
CliTestEvent: &protos.CliTestEvent{Name: protos.DummyCliEnumValue2},
},
},
},
}
protoLogs := make([]string, len(logs))
for i, log := range logs {
b, err := json.Marshal(log)
require.NoError(t, err)
protoLogs[i] = string(b)
}
reqB := telemetry.RequestBody{
UploadTime: time.Now().UnixMilli(),
Items: []string{},
ProtoLogs: protoLogs,
}
respB := telemetry.ResponseBody{}
err = apiClient.Do(ctx, "POST", "/telemetry-ext", nil, nil, reqB, &respB)
require.NoError(t, err)
assert.Equal(t, telemetry.ResponseBody{
Errors: []telemetry.LogError{},
NumProtoSuccess: int64(2),
}, respB)
}

View File

@ -1,12 +1,16 @@
package dynvar
import (
"fmt"
"regexp"
"github.com/databricks/cli/libs/dyn"
)
var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`)
var (
baseVarDef = `[a-zA-Z]+([-_]*[a-zA-Z0-9]+)*`
re = regexp.MustCompile(fmt.Sprintf(`\$\{(%s(\.%s(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`, baseVarDef, baseVarDef))
)
// ref represents a variable reference.
// It is a string [dyn.Value] contained in a larger [dyn.Value].
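Illustrative sketch (not part of the change): the relaxed `baseVarDef` now accepts runs of `-` and `_` between alphanumeric runs, while each path segment must still begin with a letter. The snippet below reassembles the same pattern locally, outside the package's internal `newRef` helper, just to show which references match.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as in the diff above, copied verbatim.
	baseVarDef := `[a-zA-Z]+([-_]*[a-zA-Z0-9]+)*`
	re := regexp.MustCompile(fmt.Sprintf(`\$\{(%s(\.%s(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`, baseVarDef, baseVarDef))

	for _, s := range []string{
		"${var.double__underscore}",     // double underscore: now matches
		"${resources.jobs.foo__bar.id}", // double underscore in a resource key
		"${hello-world.world--world}",   // repeated dashes also match
		"${_-_._-_.id}",                 // still rejected: segments must start with a letter
	} {
		fmt.Printf("%-32s match=%v\n", s, re.MatchString(s))
	}
}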

View File

@ -15,9 +15,13 @@ func TestNewRefNoString(t *testing.T) {
func TestNewRefValidPattern(t *testing.T) {
for in, refs := range map[string][]string{
"${hello_world.world_world}": {"hello_world.world_world"},
"${helloworld.world-world}": {"helloworld.world-world"},
"${hello-world.world-world}": {"hello-world.world-world"},
"${hello_world.world_world}": {"hello_world.world_world"},
"${helloworld.world-world}": {"helloworld.world-world"},
"${hello-world.world-world}": {"hello-world.world-world"},
"${hello_world.world__world}": {"hello_world.world__world"},
"${hello_world.world--world}": {"hello_world.world--world"},
"${hello_world.world-_world}": {"hello_world.world-_world"},
"${hello_world.world_-world}": {"hello_world.world_-world"},
} {
ref, ok := newRef(dyn.V(in))
require.True(t, ok, "should match valid pattern: %s", in)
@ -36,8 +40,6 @@ func TestNewRefInvalidPattern(t *testing.T) {
"${_-_._-_.id}", // cannot use _- in sequence
"${0helloworld.world-world}", // interpolated first section shouldn't start with number
"${helloworld.9world-world}", // interpolated second section shouldn't start with number
"${a-a.a-_a-a.id}", // fails because of -_ in the second segment
"${a-a.a--a-a.id}", // fails because of -- in the second segment
}
for _, v := range invalid {
_, ok := newRef(dyn.V(v))

View File

@ -111,6 +111,10 @@ func FromType(typ reflect.Type, fns []func(typ reflect.Type, s Schema) Schema) (
return res, nil
}
func TypePath(typ reflect.Type) string {
return typePath(typ)
}
// typePath computes a unique string representation of the type. $ref in the generated
// JSON schema will refer to this path. See TestTypePath for example outputs.
func typePath(typ reflect.Type) string {

View File

@ -76,6 +76,10 @@ type Schema struct {
// Title of the object, rendered as inline documentation in the IDE.
// https://json-schema.org/understanding-json-schema/reference/annotations
Title string `json:"title,omitempty"`
// Examples of the value for properties in the schema.
// https://json-schema.org/understanding-json-schema/reference/annotations
Examples []any `json:"examples,omitempty"`
}
// Default value defined in a JSON Schema, represented as a string.
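A minimal sketch of how the new Examples annotation ends up in the emitted JSON. The struct below is a local stand-in for the two annotation fields shown above, not the full jsonschema.Schema type, and the example values are invented.

package main

import (
	"encoding/json"
	"fmt"
)

// Local stand-in with only the annotation fields discussed above.
type annotated struct {
	Title    string `json:"title,omitempty"`
	Examples []any  `json:"examples,omitempty"`
}

func main() {
	out, _ := json.MarshalIndent(annotated{
		Title:    "Node type id for job cluster",        // rendered as inline documentation in the IDE
		Examples: []any{"i3.xlarge", "Standard_DS3_v2"}, // hypothetical example values
	}, "", "  ")
	// Prints a JSON object with "title" and "examples" keys.
	fmt.Println(string(out))
}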

libs/telemetry/api.go (new file, 31 lines)
View File

@ -0,0 +1,31 @@
package telemetry
// RequestBody is the request body type bindings for the /telemetry-ext API endpoint.
type RequestBody struct {
// Timestamp in millis for when the log was uploaded.
UploadTime int64 `json:"uploadTime"`
// DO NOT USE. This is the legacy field for logging in usage logs (not lumberjack).
// We keep this around because the API endpoint works only if this field is serialized
// to an empty array.
Items []string `json:"items"`
// JSON encoded strings containing the proto logs. Since it's represented as a
// string here, the values here end up being double JSON encoded in the final
// request body.
//
// Any logs here will be logged in our lumberjack tables as long as a corresponding
// protobuf is defined in universe.
ProtoLogs []string `json:"protoLogs"`
}
// ResponseBody is the response body type bindings for the /telemetry-ext API endpoint.
type ResponseBody struct {
Errors []LogError `json:"errors"`
NumProtoSuccess int64 `json:"numProtoSuccess"`
}
type LogError struct {
Message string `json:"message"`
ErrorType string `json:"errorType"`
}
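A self-contained sketch of the double encoding the ProtoLogs comment describes. The struct below is a local copy of the request body shape, and the log entry fields are invented for illustration.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Local copy of the request body shape above.
type requestBody struct {
	UploadTime int64    `json:"uploadTime"`
	Items      []string `json:"items"`
	ProtoLogs  []string `json:"protoLogs"`
}

func main() {
	// First encoding: one log entry becomes a JSON string.
	entry, _ := json.Marshal(map[string]any{"frontend_log_event_id": "1234"})

	// Second encoding: the string is embedded in protoLogs, so the final
	// payload carries it as an escaped JSON string ("double encoded").
	body, _ := json.Marshal(requestBody{
		UploadTime: time.Now().UnixMilli(),
		Items:      []string{}, // must serialize to [] for the endpoint to accept the request
		ProtoLogs:  []string{string(entry)},
	})
	fmt.Println(string(body))
}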

View File

@ -0,0 +1,2 @@
The types in this package are equivalent to the lumberjack protos defined in Universe.
You can find all lumberjack protos for the Databricks CLI in the `proto/logs/frontend/databricks_cli` directory.

View File

@ -0,0 +1,77 @@
package protos
type BundleDeployEvent struct {
// UUID associated with the bundle itself. Set in the `bundle.uuid` field in the bundle configuration.
BundleUuid string `json:"bundle_uuid,omitempty"`
ResourceCount int64 `json:"resource_count,omitempty"`
ResourceJobCount int64 `json:"resource_job_count,omitempty"`
ResourcePipelineCount int64 `json:"resource_pipeline_count,omitempty"`
ResourceModelCount int64 `json:"resource_model_count,omitempty"`
ResourceExperimentCount int64 `json:"resource_experiment_count,omitempty"`
ResourceModelServingEndpointCount int64 `json:"resource_model_serving_endpoint_count,omitempty"`
ResourceRegisteredModelCount int64 `json:"resource_registered_model_count,omitempty"`
ResourceQualityMonitorCount int64 `json:"resource_quality_monitor_count,omitempty"`
ResourceSchemaCount int64 `json:"resource_schema_count,omitempty"`
ResourceVolumeCount int64 `json:"resource_volume_count,omitempty"`
ResourceClusterCount int64 `json:"resource_cluster_count,omitempty"`
ResourceDashboardCount int64 `json:"resource_dashboard_count,omitempty"`
ResourceAppCount int64 `json:"resource_app_count,omitempty"`
// IDs of resources managed by the bundle. Some resources like volumes or schemas
// do not expose a numerical or UUID identifier and are tracked by name. Those
// resources are not tracked here since the names are PII.
ResourceJobIDs []string `json:"resource_job_ids,omitempty"`
ResourcePipelineIDs []string `json:"resource_pipeline_ids,omitempty"`
ResourceClusterIDs []string `json:"resource_cluster_ids,omitempty"`
ResourceDashboardIDs []string `json:"resource_dashboard_ids,omitempty"`
Experimental *BundleDeployExperimental `json:"experimental,omitempty"`
}
// These metrics are experimental and are often added in an ad hoc manner. There
// are no guarantees for these metrics and they may be removed in the future without
// any notice.
type BundleDeployExperimental struct {
// Number of configuration files in the bundle.
ConfigurationFileCount int64 `json:"configuration_file_count,omitempty"`
// Size in bytes of the Terraform state file
TerraformStateSizeBytes int64 `json:"terraform_state_size_bytes,omitempty"`
// Number of variables in the bundle
VariableCount int64 `json:"variable_count,omitempty"`
ComplexVariableCount int64 `json:"complex_variable_count,omitempty"`
LookupVariableCount int64 `json:"lookup_variable_count,omitempty"`
// Number of targets in the bundle
TargetCount int64 `json:"target_count,omitempty"`
// Whether a field is set or not. If a configuration field is not present in this
// map then it is not tracked by this field.
// Keys are the full path of the field in the configuration tree.
// Examples: "bundle.terraform.exec_path", "bundle.git.branch" etc.
SetFields []BoolMapEntry `json:"set_fields,omitempty"`
// Values for boolean configuration fields like `experimental.python_wheel_wrapper`
// We don't need to define protos to track boolean values and can simply write those
// values to this map to track them.
BoolValues []BoolMapEntry `json:"bool_values,omitempty"`
BundleMode BundleMode `json:"bundle_mode,omitempty"`
WorkspaceArtifactPathType BundleDeployArtifactPathType `json:"workspace_artifact_path_type,omitempty"`
// Execution time per mutator for a selected subset of mutators.
BundleMutatorExecutionTimeMs []IntMapEntry `json:"bundle_mutator_execution_time_ms,omitempty"`
}
type BoolMapEntry struct {
Key string `json:"key,omitempty"`
Value bool `json:"value,omitempty"`
}
type IntMapEntry struct {
Key string `json:"key,omitempty"`
Value int64 `json:"value,omitempty"`
}
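Since SetFields and BoolValues are slices of key/value entries rather than Go maps, they serialize as arrays of objects. A hedged sketch with local copies of the entry type, using made-up keys and values:

package main

import (
	"encoding/json"
	"fmt"
)

// Local copy of the entry type above.
type boolMapEntry struct {
	Key   string `json:"key,omitempty"`
	Value bool   `json:"value,omitempty"`
}

type experimental struct {
	SetFields  []boolMapEntry `json:"set_fields,omitempty"`
	BoolValues []boolMapEntry `json:"bool_values,omitempty"`
}

func main() {
	out, _ := json.Marshal(experimental{
		// Keys are full configuration paths, as described in the comments above.
		SetFields:  []boolMapEntry{{Key: "bundle.git.branch", Value: true}},
		BoolValues: []boolMapEntry{{Key: "experimental.python_wheel_wrapper", Value: true}},
	})
	// Prints both slices as arrays of {"key": ..., "value": ...} objects.
	fmt.Println(string(out))
}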

View File

@ -0,0 +1,37 @@
package protos
type BundleInitEvent struct {
// UUID associated with the DAB itself. This is serialized into the DAB
// when a user runs `databricks bundle init` and all subsequent deployments of
// that DAB can then be associated with this init event.
BundleUuid string `json:"bundle_uuid,omitempty"`
// Name of the template initialized when the user ran `databricks bundle init`
// This is only populated when the template is a first party template like
// mlops-stacks or default-python.
TemplateName string `json:"template_name,omitempty"`
// Arguments used by the user to initialize the template. Only enum
// values will be set here by the Databricks CLI.
//
// We use a generic map representation here because a bundle template's args are
// managed in the template itself, and maintaining a typed copy of the schema for it here
// will be untenable in the long term.
TemplateEnumArgs []BundleInitTemplateEnumArg `json:"template_enum_args,omitempty"`
}
type BundleInitTemplateEnumArg struct {
// Valid key values for the template. These correspond to the keys specified in
// the "properties" section of the `databricks_template_schema.json` file.
//
// Note: `databricks_template_schema.json` contains a JSON schema type specification
// for the arguments that the template accepts.
Key string `json:"key"`
// Value that the user set for the field. This is only populated for properties
// that have the "enum" field specified in the JSON schema type specification.
//
// The Databricks CLI ensures that the value here is one of the "enum" values from
// the template specification.
Value string `json:"value"`
}

View File

@ -0,0 +1,35 @@
package protos
type ExecutionContext struct {
// UUID generated by the CLI for every CLI command run. This is also set in the HTTP user
// agent under the key "cmd-exec-id" and can be used to correlate frontend_log table
// with the http_access_log table.
CmdExecID string `json:"cmd_exec_id,omitempty"`
// Version of the Databricks CLI used.
Version string `json:"version,omitempty"`
// Command that was run by the user. Eg: bundle_deploy, fs_cp etc.
Command string `json:"command,omitempty"`
// Lowercase string name for the operating system. Same value
// as the one set in `runtime.GOOS` in Golang.
OperatingSystem string `json:"operating_system,omitempty"`
// Version of DBR from which the CLI is being run.
// Only set when the CLI is being run from a Databricks cluster.
DbrVersion string `json:"dbr_version,omitempty"`
// If true, the CLI is being run from a Databricks notebook / cluster web terminal.
FromWebTerminal bool `json:"from_web_terminal,omitempty"`
// Time taken for the CLI command to execute.
ExecutionTimeMs int64 `json:"execution_time_ms,omitempty"`
// Exit code of the CLI command.
ExitCode int64 `json:"exit_code,omitempty"`
}
type CliTestEvent struct {
Name DummyCliEnum `json:"name,omitempty"`
}

View File

@ -0,0 +1,26 @@
package protos
type DummyCliEnum string
const (
DummyCliEnumUnspecified DummyCliEnum = "DUMMY_CLI_ENUM_UNSPECIFIED"
DummyCliEnumValue1 DummyCliEnum = "VALUE1"
DummyCliEnumValue2 DummyCliEnum = "VALUE2"
DummyCliEnumValue3 DummyCliEnum = "VALUE3"
)
type BundleMode string
const (
BundleModeUnspecified BundleMode = "TYPE_UNSPECIFIED"
BundleModeDevelopment BundleMode = "DEVELOPMENT"
BundleModeProduction BundleMode = "PRODUCTION"
)
type BundleDeployArtifactPathType string
const (
BundleDeployArtifactPathTypeUnspecified BundleDeployArtifactPathType = "TYPE_UNSPECIFIED"
BundleDeployArtifactPathTypeWorkspace BundleDeployArtifactPathType = "WORKSPACE_FILE_SYSTEM"
BundleDeployArtifactPathTypeVolume BundleDeployArtifactPathType = "UC_VOLUME"
)

View File

@ -0,0 +1,22 @@
package protos
// This corresponds to the FrontendLog lumberjack proto in universe.
// FrontendLog is the top-level struct for any client-side logs at Databricks.
type FrontendLog struct {
// A UUID for the log event generated from the CLI.
FrontendLogEventID string `json:"frontend_log_event_id,omitempty"`
Entry FrontendLogEntry `json:"entry,omitempty"`
}
type FrontendLogEntry struct {
DatabricksCliLog DatabricksCliLog `json:"databricks_cli_log,omitempty"`
}
type DatabricksCliLog struct {
ExecutionContext *ExecutionContext `json:"execution_context,omitempty"`
CliTestEvent *CliTestEvent `json:"cli_test_event,omitempty"`
BundleInitEvent *BundleInitEvent `json:"bundle_init_event,omitempty"`
BundleDeployEvent *BundleDeployEvent `json:"bundle_deploy_event,omitempty"`
}
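Because the event fields on DatabricksCliLog are pointers with omitempty, only the event that is actually populated appears on the wire. A small sketch with local stand-ins (not the real protos package), using a hypothetical event ID:

package main

import (
	"encoding/json"
	"fmt"
)

// Local stand-ins mirroring the shape above.
type cliTestEvent struct {
	Name string `json:"name,omitempty"`
}

type databricksCliLog struct {
	CliTestEvent    *cliTestEvent `json:"cli_test_event,omitempty"`
	BundleInitEvent *struct{}     `json:"bundle_init_event,omitempty"`
}

type frontendLog struct {
	FrontendLogEventID string `json:"frontend_log_event_id,omitempty"`
	Entry              struct {
		DatabricksCliLog databricksCliLog `json:"databricks_cli_log,omitempty"`
	} `json:"entry,omitempty"`
}

func main() {
	l := frontendLog{FrontendLogEventID: "hypothetical-uuid"}
	l.Entry.DatabricksCliLog.CliTestEvent = &cliTestEvent{Name: "VALUE1"}
	out, _ := json.Marshal(l)
	// The nil BundleInitEvent pointer is omitted; only cli_test_event is emitted.
	fmt.Println(string(out))
}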

View File

@ -18,6 +18,7 @@ type Server struct {
func New(t testutil.TestingT) *Server {
mux := http.NewServeMux()
server := httptest.NewServer(mux)
t.Cleanup(server.Close)
return &Server{
Server: server,
@ -28,10 +29,6 @@ func New(t testutil.TestingT) *Server {
type HandlerFunc func(req *http.Request) (resp any, err error)
func (s *Server) Close() {
s.Server.Close()
}
func (s *Server) Handle(pattern string, handler HandlerFunc) {
s.Mux.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) {
resp, err := handler(r)