Autogenerated documentation for bundle config (#2033)

## Changes

Adds a documentation autogeneration tool. It uses the same `annotations_*.yml`
files as the JSON-schema generator.

The result will be published
[here](https://docs.databricks.com/en/dev-tools/bundles/reference.html)
and
[here](https://docs.databricks.com/en/dev-tools/bundles/resources.html#cluster).

## Tests
Tested manually.
Ilya Kuznetsov 2025-01-29 13:14:21 +01:00 committed by GitHub
parent 30f57d3b49
commit 708c4fbb7a
23 changed files with 11065 additions and 367 deletions


@@ -48,6 +48,9 @@ vendor:
schema:
go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
docs:
go run ./bundle/docsgen ./bundle/internal/schema ./bundle/docsgen
INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
integration:
@@ -56,4 +59,4 @@ integration:
integration-short:
$(INTEGRATION) -short
.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover
.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs

bundle/docsgen/README.md Normal file

@@ -0,0 +1,79 @@
## docs-autogen
1. Install [Golang](https://go.dev/doc/install)
2. Run `make vendor docs` from the repository root
3. See the generated documents `reference.md` and `resources.md` in the `./bundle/docsgen/output` directory
4. To change descriptions, update the content in `./bundle/internal/schema/annotations.yml` or `./bundle/internal/schema/annotations_openapi_overrides.yml` and re-run `make docs`
For convenience, run it together with a copy command that moves the resulting files into a local `docs` repo. Note that this overwrites any local changes in the affected files. Example:
```
make docs && cp bundle/docsgen/output/*.md ../docs/source/dev-tools/bundles
```
To change the intro sections of the generated files, update them in the `templates/` directory
### Annotation file structure
```yaml
"<root-type-name>":
"<property-name>":
description: Description of the property; only plain text is supported
markdown_description: Description with Markdown support; if defined, it overrides the value in the docs and in the JSON schema
markdown_examples: Custom block for examples, in free form; Markdown is supported
title: JSON-schema title, not used in docs
default: Default value of the property, not used in docs
enum: Possible values of an enum type, not used in docs
```
Descriptions with the `PLACEHOLDER` value are not displayed in the docs or in the JSON schema.
All relative links like `[_](/dev-tools/bundles/settings.md#cluster_id)` are kept as-is in the docs but converted to absolute links in the JSON schema.
To change the description of a type itself (not its fields), use `"_"`:
```yaml
github.com/databricks/cli/bundle/config/resources.Cluster:
"_":
"markdown_description": |-
The cluster resource defines an [all-purpose cluster](/api/workspace/clusters/create).
```
### Example annotation
```yaml
github.com/databricks/cli/bundle/config.Bundle:
"cluster_id":
"description": |-
The ID of a cluster to use to run the bundle.
"markdown_description": |-
The ID of a cluster to use to run the bundle. See [_](/dev-tools/bundles/settings.md#cluster_id).
"compute_id":
"description": |-
PLACEHOLDER
"databricks_cli_version":
"description": |-
The Databricks CLI version to use for the bundle.
"markdown_description": |-
The Databricks CLI version to use for the bundle. See [_](/dev-tools/bundles/settings.md#databricks_cli_version).
"deployment":
"description": |-
The definition of the bundle deployment
"markdown_description": |-
The definition of the bundle deployment. For supported attributes, see [_](#deployment) and [_](/dev-tools/bundles/deployment-modes.md).
"git":
"description": |-
The Git version control details that are associated with your bundle.
"markdown_description": |-
The Git version control details that are associated with your bundle. For supported attributes, see [_](#git) and [_](/dev-tools/bundles/settings.md#git).
"name":
"description": |-
The name of the bundle.
"uuid":
"description": |-
PLACEHOLDER
```
### TODO
Add a file watcher to track changes in the annotation files and re-run `make docs` automatically

bundle/docsgen/main.go Normal file

@@ -0,0 +1,135 @@
package main
import (
"fmt"
"log"
"os"
"path"
"reflect"
"strings"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/internal/annotation"
"github.com/databricks/cli/libs/jsonschema"
)
const (
rootFileName = "reference.md"
resourcesFileName = "resources.md"
)
func main() {
if len(os.Args) != 3 {
fmt.Println("Usage: go run main.go <annotation-file> <output-file>")
os.Exit(1)
}
annotationDir := os.Args[1]
docsDir := os.Args[2]
outputDir := path.Join(docsDir, "output")
templatesDir := path.Join(docsDir, "templates")
if _, err := os.Stat(outputDir); os.IsNotExist(err) {
if err := os.MkdirAll(outputDir, 0o755); err != nil {
log.Fatal(err)
}
}
rootHeader, err := os.ReadFile(path.Join(templatesDir, rootFileName))
if err != nil {
log.Fatal(err)
}
err = generateDocs(
[]string{path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, rootFileName),
reflect.TypeOf(config.Root{}),
string(rootHeader),
)
if err != nil {
log.Fatal(err)
}
resourcesHeader, err := os.ReadFile(path.Join(templatesDir, resourcesFileName))
if err != nil {
log.Fatal(err)
}
err = generateDocs(
[]string{path.Join(annotationDir, "annotations_openapi.yml"), path.Join(annotationDir, "annotations_openapi_overrides.yml"), path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, resourcesFileName),
reflect.TypeOf(config.Resources{}),
string(resourcesHeader),
)
if err != nil {
log.Fatal(err)
}
}
func generateDocs(inputPaths []string, outputPath string, rootType reflect.Type, header string) error {
annotations, err := annotation.LoadAndMerge(inputPaths)
if err != nil {
log.Fatal(err)
}
// schemas is used to resolve references to schemas
schemas := map[string]*jsonschema.Schema{}
// ownFields is used to track fields that are defined in the annotation file and should be included in the docs page
ownFields := map[string]bool{}
s, err := jsonschema.FromType(rootType, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
_, isOwnField := annotations[jsonschema.TypePath(typ)]
if isOwnField {
ownFields[jsonschema.TypePath(typ)] = true
}
refPath := getPath(typ)
shouldHandle := strings.HasPrefix(refPath, "github.com")
if !shouldHandle {
schemas[jsonschema.TypePath(typ)] = &s
return s
}
a := annotations[refPath]
if a == nil {
a = map[string]annotation.Descriptor{}
}
rootTypeAnnotation, ok := a["_"]
if ok {
assignAnnotation(&s, rootTypeAnnotation)
}
for k, v := range s.Properties {
assignAnnotation(v, a[k])
}
schemas[jsonschema.TypePath(typ)] = &s
return s
},
})
if err != nil {
log.Fatal(err)
}
nodes := buildNodes(s, schemas, ownFields)
err = buildMarkdown(nodes, outputPath, header)
if err != nil {
log.Fatal(err)
}
return nil
}
func getPath(typ reflect.Type) string {
return typ.PkgPath() + "." + typ.Name()
}
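// assignAnnotation copies the description, markdown description, and markdown
// examples from the annotation entry onto the schema node. PLACEHOLDER
// descriptions are skipped so they never appear in the generated docs.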
func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
if a.Description != "" && a.Description != annotation.Placeholder {
s.Description = a.Description
}
if a.MarkdownDescription != "" {
s.MarkdownDescription = a.MarkdownDescription
}
if a.MarkdownExamples != "" {
s.Examples = []any{a.MarkdownExamples}
}
}


@@ -0,0 +1,99 @@
package main
import (
"fmt"
"log"
"os"
"strings"
)
func buildMarkdown(nodes []rootNode, outputFile, header string) error {
m := newMardownRenderer()
m = m.PlainText(header)
for _, node := range nodes {
m = m.LF()
if node.TopLevel {
m = m.H2(node.Title)
} else {
m = m.H3(node.Title)
}
m = m.LF()
if node.Type != "" {
m = m.PlainText(fmt.Sprintf("**`Type: %s`**", node.Type))
m = m.LF()
}
m = m.PlainText(node.Description)
m = m.LF()
if len(node.ObjectKeyAttributes) > 0 {
n := pickLastWord(node.Title)
n = removePluralForm(n)
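// For a node titled "resources.jobs" this renders the skeleton:
//
//	jobs:
//	  <job-name>:
//	    <job-field-name>: <job-field-value>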
m = m.CodeBlocks("yaml", fmt.Sprintf("%ss:\n <%s-name>:\n <%s-field-name>: <%s-field-value>", n, n, n, n))
m = m.LF()
m = buildAttributeTable(m, node.ObjectKeyAttributes)
} else if len(node.ArrayItemAttributes) > 0 {
m = m.LF()
m = buildAttributeTable(m, node.ArrayItemAttributes)
} else if len(node.Attributes) > 0 {
m = m.LF()
m = buildAttributeTable(m, node.Attributes)
}
if node.Example != "" {
m = m.LF()
m = m.PlainText("**Example**")
m = m.LF()
m = m.PlainText(node.Example)
}
}
f, err := os.Create(outputFile)
if err != nil {
log.Fatal(err)
}
_, err = f.WriteString(m.String())
if err != nil {
log.Fatal(err)
}
return f.Close()
}
func pickLastWord(s string) string {
words := strings.Split(s, ".")
return words[len(words)-1]
}
// Build the custom list-table directive that is used on the Databricks docs website
func buildAttributeTable(m *markdownRenderer, attributes []attributeNode) *markdownRenderer {
m = m.LF()
m = m.PlainText(".. list-table::")
m = m.PlainText(" :header-rows: 1")
m = m.LF()
m = m.PlainText(" * - Key")
m = m.PlainText(" - Type")
m = m.PlainText(" - Description")
m = m.LF()
for _, a := range attributes {
m = m.PlainText(" * - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a))
m = m.LF()
}
return m
}
func formatDescription(a attributeNode) string {
s := strings.ReplaceAll(a.Description, "\n", " ")
if a.Link != "" {
if strings.HasSuffix(s, ".") {
s += " "
} else if s != "" {
s += ". "
}
s += fmt.Sprintf("See [_](#%s).", a.Link)
}
return s
}

bundle/docsgen/nodes.go Normal file

@@ -0,0 +1,228 @@
package main
import (
"sort"
"strings"
"github.com/databricks/cli/libs/jsonschema"
)
// rootNode is an intermediate representation of a resolved JSON-schema item that is used to generate documentation
// Every schema node follows the conversion `JSON-schema -> rootNode -> markdown text`
type rootNode struct {
Title string
Description string
Attributes []attributeNode
Example string
ObjectKeyAttributes []attributeNode
ArrayItemAttributes []attributeNode
TopLevel bool
Type string
}
type attributeNode struct {
Title string
Type string
Description string
Link string
}
type rootProp struct {
// k is the name of the property
k string
// v is the corresponding json-schema node
v *jsonschema.Schema
// topLevel is true only for direct properties of the schema of the root type (e.g. config.Root or config.Resources)
// Example: config.Root has direct properties like `bundle`, `resources`, and `targets`
topLevel bool
// circular indicates if property was added by recursive type, e.g. task.for_each_task.task.for_each_task
// These entries don't expand further and don't add any new nodes from their properties
circular bool
}
const MapType = "Map"
// buildNodes converts a JSON schema to a flat list of rootNode items that are then used to generate markdown documentation
// It recursively traverses the schema, expanding the resulting list with new items for every property of `object` and `array` nodes
func buildNodes(s jsonschema.Schema, refs map[string]*jsonschema.Schema, ownFields map[string]bool) []rootNode {
rootProps := []rootProp{}
for k, v := range s.Properties {
rootProps = append(rootProps, rootProp{k, v, true, false})
}
nodes := make([]rootNode, 0, len(rootProps))
visited := make(map[string]bool)
for i := 0; i < len(rootProps); i++ {
item := rootProps[i]
k := item.k
v := item.v
if visited[k] {
continue
}
visited[k] = true
v = resolveRefs(v, refs)
node := rootNode{
Title: k,
Description: getDescription(v, item.topLevel),
TopLevel: item.topLevel,
Example: getExample(v),
Type: getHumanReadableType(v.Type),
}
hasProperties := len(v.Properties) > 0
if hasProperties {
node.Attributes = getAttributes(v.Properties, refs, ownFields, k, item.circular)
}
mapValueType := getMapValueType(v, refs)
if mapValueType != nil {
d := getDescription(mapValueType, true)
if d != "" {
node.Description = d
}
if node.Example == "" {
node.Example = getExample(mapValueType)
}
node.ObjectKeyAttributes = getAttributes(mapValueType.Properties, refs, ownFields, getMapKeyPrefix(k), item.circular)
}
arrayItemType := resolveRefs(v.Items, refs)
if arrayItemType != nil {
node.ArrayItemAttributes = getAttributes(arrayItemType.Properties, refs, ownFields, k, item.circular)
}
nodes = append(nodes, node)
// Whether we should add new root props from the children of the current JSON-schema node to include their definitions in this document
shouldAddNewProps := !item.circular
if shouldAddNewProps {
newProps := []rootProp{}
// Adds node with definition for the properties. Example:
// bundle:
// prop-name: <value>
if hasProperties {
newProps = append(newProps, extractNodes(k, v.Properties, refs, ownFields)...)
}
// Adds node with definition for the type of array item. Example:
// permissions:
// - <item>
if arrayItemType != nil {
newProps = append(newProps, extractNodes(k, arrayItemType.Properties, refs, ownFields)...)
}
// Adds node with definition for the type of the Map value. Example:
// targets:
// <key>: <value>
if mapValueType != nil {
newProps = append(newProps, extractNodes(getMapKeyPrefix(k), mapValueType.Properties, refs, ownFields)...)
}
rootProps = append(rootProps, newProps...)
}
}
sort.Slice(nodes, func(i, j int) bool {
return nodes[i].Title < nodes[j].Title
})
return nodes
}
func getMapValueType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema) *jsonschema.Schema {
additionalProps, ok := v.AdditionalProperties.(*jsonschema.Schema)
if ok {
return resolveRefs(additionalProps, refs)
}
return nil
}
func getMapKeyPrefix(s string) string {
return s + ".<name>"
}
func removePluralForm(s string) string {
if strings.HasSuffix(s, "s") {
return strings.TrimSuffix(s, "s")
}
return s
}
func getHumanReadableType(t jsonschema.Type) string {
typesMapping := map[string]string{
"string": "String",
"integer": "Integer",
"boolean": "Boolean",
"array": "Sequence",
"object": "Map",
}
return typesMapping[string(t)]
}
func getAttributes(props, refs map[string]*jsonschema.Schema, ownFields map[string]bool, prefix string, circular bool) []attributeNode {
attributes := []attributeNode{}
for k, v := range props {
v = resolveRefs(v, refs)
typeString := getHumanReadableType(v.Type)
if typeString == "" {
typeString = "Any"
}
var reference string
if isReferenceType(v, refs, ownFields) && !circular {
reference = prefix + "." + k
}
attributes = append(attributes, attributeNode{
Title: k,
Type: typeString,
Description: getDescription(v, true),
Link: reference,
})
}
sort.Slice(attributes, func(i, j int) bool {
return attributes[i].Title < attributes[j].Title
})
return attributes
}
func getDescription(s *jsonschema.Schema, allowMarkdown bool) string {
if allowMarkdown && s.MarkdownDescription != "" {
return s.MarkdownDescription
}
return s.Description
}
func shouldExtract(ref string, ownFields map[string]bool) bool {
if i := strings.Index(ref, "github.com"); i >= 0 {
ref = ref[i:]
}
_, isCustomField := ownFields[ref]
return isCustomField
}
// extractNodes returns a list of rootProp items for all properties of the JSON-schema node that should be extracted based on context
// E.g. we extract all `object` and `array` properties whose types are defined in the CLI package (own fields); references to other types are skipped
func extractNodes(prefix string, props, refs map[string]*jsonschema.Schema, ownFields map[string]bool) []rootProp {
nodes := []rootProp{}
for k, v := range props {
if v.Reference != nil && !shouldExtract(*v.Reference, ownFields) {
continue
}
v = resolveRefs(v, refs)
if v.Type == "object" || v.Type == "array" {
nodes = append(nodes, rootProp{prefix + "." + k, v, false, isCycleField(k)})
}
}
return nodes
}
func isCycleField(field string) bool {
return field == "for_each_task"
}
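// getExample returns the first example attached to the schema node. In this
// pipeline examples are set from `markdown_examples` in the annotation files,
// so the value is always a markdown string.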
func getExample(v *jsonschema.Schema) string {
examples := v.Examples
if len(examples) == 0 {
return ""
}
return examples[0].(string)
}


@@ -0,0 +1,120 @@
package main
import (
"testing"
"github.com/databricks/cli/libs/jsonschema"
"github.com/stretchr/testify/assert"
)
func TestBuildNodes_ChildExpansion(t *testing.T) {
tests := []struct {
name string
schema jsonschema.Schema
refs map[string]*jsonschema.Schema
ownFields map[string]bool
wantNodes []rootNode
}{
{
name: "array expansion",
schema: jsonschema.Schema{
Type: "object",
Properties: map[string]*jsonschema.Schema{
"list": {
Type: "array",
Items: &jsonschema.Schema{
Type: "object",
Properties: map[string]*jsonschema.Schema{
"listSub": {Reference: strPtr("#/$defs/github.com/listSub")},
},
},
},
},
},
refs: map[string]*jsonschema.Schema{
"github.com/listSub": {Type: "array", Items: &jsonschema.Schema{Type: "object", Properties: map[string]*jsonschema.Schema{"subField": {Type: "string"}}}},
},
ownFields: map[string]bool{"github.com/listSub": true},
wantNodes: []rootNode{
{
Title: "list",
TopLevel: true,
Type: "Sequence",
ArrayItemAttributes: []attributeNode{
{Title: "listSub", Type: "Sequence", Link: "list.listSub"},
},
},
{
Title: "list.listSub",
Type: "Sequence",
ArrayItemAttributes: []attributeNode{
{Title: "subField", Type: "String"},
},
},
},
},
{
name: "map expansion",
schema: jsonschema.Schema{
Type: "object",
Properties: map[string]*jsonschema.Schema{
"myMap": {
Type: "object",
AdditionalProperties: &jsonschema.Schema{
Reference: strPtr("#/$defs/github.com/myMap"),
Properties: map[string]*jsonschema.Schema{
"mapSub": {Type: "object", Reference: strPtr("#/$defs/github.com/mapSub")},
},
},
},
},
},
refs: map[string]*jsonschema.Schema{
"github.com/myMap": {
Type: "object",
Properties: map[string]*jsonschema.Schema{
"mapSub": {Type: "boolean", Reference: strPtr("#/$defs/github.com/mapSub")},
},
},
"github.com/mapSub": {
Type: "object",
Properties: map[string]*jsonschema.Schema{
"deepSub": {Type: "boolean"},
},
},
},
ownFields: map[string]bool{
"github.com/myMap": true,
"github.com/mapSub": true,
},
wantNodes: []rootNode{
{
Title: "myMap",
TopLevel: true,
Type: "Map",
ObjectKeyAttributes: []attributeNode{
{Title: "mapSub", Type: "Map", Link: "myMap.<name>.mapSub"},
},
},
{
Title: "myMap.<name>.mapSub",
Type: "Map",
Attributes: []attributeNode{
{Title: "deepSub", Type: "Boolean"},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := buildNodes(tt.schema, tt.refs, tt.ownFields)
assert.Equal(t, tt.wantNodes, got)
})
}
}
func strPtr(s string) *string {
return &s
}

File diff suppressed because it is too large

File diff suppressed because it is too large

bundle/docsgen/refs.go Normal file

@@ -0,0 +1,97 @@
package main
import (
"log"
"strings"
"github.com/databricks/cli/libs/jsonschema"
)
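// isReferenceType reports whether the schema node gets its own section in the
// generated docs and can therefore be linked to: objects with properties,
// arrays of objects, and maps whose value type is an object defined in the
// CLI package (an "own field").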
func isReferenceType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema, ownFields map[string]bool) bool {
if v.Type != "object" && v.Type != "array" {
return false
}
if len(v.Properties) > 0 {
return true
}
if v.Items != nil {
items := resolveRefs(v.Items, refs)
if items != nil && items.Type == "object" {
return true
}
}
props := resolveAdditionalProperties(v)
if !isInOwnFields(props, ownFields) {
return false
}
if props != nil {
propsResolved := resolveRefs(props, refs)
return propsResolved.Type == "object"
}
return false
}
func isInOwnFields(node *jsonschema.Schema, ownFields map[string]bool) bool {
if node != nil && node.Reference != nil {
return ownFields[getRefType(node)]
}
return true
}
func resolveAdditionalProperties(v *jsonschema.Schema) *jsonschema.Schema {
if v.AdditionalProperties == nil {
return nil
}
additionalProps, ok := v.AdditionalProperties.(*jsonschema.Schema)
if !ok {
return nil
}
return additionalProps
}
func resolveRefs(s *jsonschema.Schema, schemas map[string]*jsonschema.Schema) *jsonschema.Schema {
if s == nil {
return nil
}
node := s
description := s.Description
markdownDescription := s.MarkdownDescription
examples := s.Examples
for node.Reference != nil {
ref := getRefType(node)
newNode, ok := schemas[ref]
if !ok {
log.Printf("schema %s not found", ref)
break
}
if description == "" {
description = newNode.Description
}
if markdownDescription == "" {
markdownDescription = newNode.MarkdownDescription
}
if len(examples) == 0 {
examples = newNode.Examples
}
node = newNode
}
newNode := *node
newNode.Description = description
newNode.MarkdownDescription = markdownDescription
newNode.Examples = examples
return &newNode
}
func getRefType(node *jsonschema.Schema) string {
if node.Reference == nil {
return ""
}
return strings.TrimPrefix(*node.Reference, "#/$defs/")
}
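For context, a minimal sketch of the resolution behavior as a test in the same package (it reuses the `strPtr` helper from `nodes_test.go`; the type path is made up for illustration). A description set on the referencing node takes precedence over the one on the referenced schema:

```go
func TestResolveRefs_KeepsClosestDescription(t *testing.T) {
	refs := map[string]*jsonschema.Schema{
		// Hypothetical type path, used only for this sketch.
		"github.com/example.Inner": {Type: "object", Description: "inner"},
	}
	s := &jsonschema.Schema{
		Reference:   strPtr("#/$defs/github.com/example.Inner"),
		Description: "outer",
	}
	resolved := resolveRefs(s, refs)
	assert.Equal(t, "outer", resolved.Description) // the closest description wins
	assert.Equal(t, jsonschema.Type("object"), resolved.Type)
}
```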


@@ -0,0 +1,51 @@
package main
import (
"fmt"
"runtime"
"strings"
)
type markdownRenderer struct {
nodes []string
}
func newMardownRenderer() *markdownRenderer {
return &markdownRenderer{}
}
func (m *markdownRenderer) add(s string) *markdownRenderer {
m.nodes = append(m.nodes, s)
return m
}
func (m *markdownRenderer) PlainText(s string) *markdownRenderer {
return m.add(s)
}
func (m *markdownRenderer) LF() *markdownRenderer {
return m.add(" ")
}
func (m *markdownRenderer) H2(s string) *markdownRenderer {
return m.add("## " + s)
}
func (m *markdownRenderer) H3(s string) *markdownRenderer {
return m.add("### " + s)
}
func (m *markdownRenderer) CodeBlocks(lang, s string) *markdownRenderer {
return m.add(fmt.Sprintf("```%s%s%s%s```", lang, lineFeed(), s, lineFeed()))
}
func (m *markdownRenderer) String() string {
return strings.Join(m.nodes, lineFeed())
}
func lineFeed() string {
if runtime.GOOS == "windows" {
return "\r\n"
}
return "\n"
}
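As a rough usage illustration (hypothetical content; the real call sites are in `buildMarkdown`):

```go
func example() string {
	m := newMardownRenderer()
	m = m.H2("jobs").LF()
	m = m.PlainText("**`Type: Map`**").LF()
	m = m.CodeBlocks("yaml", "jobs:\n  <job-name>:\n    name: my-job")
	return m.String() // nodes joined with the OS-specific line feed
}
```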


@@ -0,0 +1,10 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Configuration reference for databricks.yml
---
# Configuration reference
This article provides a reference for keys supported by <DABS> configuration (YAML). See [_](/dev-tools/bundles/index.md).
For complete bundle examples, see [_](/dev-tools/bundles/resource-examples.md) and the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples).


@@ -0,0 +1,70 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Learn about resources supported by Databricks Asset Bundles and how to configure them.
---
# <DABS> resources
<DABS> allows you to specify information about the <Databricks> resources used by the bundle in the `resources` mapping in the bundle configuration. See [resources mapping](/dev-tools/bundles/settings.md#resources) and [resources key reference](/dev-tools/bundles/reference.md#resources).
This article outlines supported resource types for bundles and provides details and an example for each supported type. For additional examples, see [_](/dev-tools/bundles/resource-examples.md).
## <a id="resource-types"></a> Supported resources
The following table lists supported resource types for bundles. Some resources can be created by defining them in a bundle and deploying the bundle, and some resources only support referencing an existing resource to include in the bundle.
Resources are defined using the corresponding [Databricks REST API](/api/workspace/introduction) object's create operation request payload, where the object's supported fields, expressed as YAML, are the resource's supported properties. Links to documentation for each resource's corresponding payloads are listed in the table.
.. tip:: The `databricks bundle validate` command returns warnings if unknown resource properties are found in bundle configuration files.
.. list-table::
:header-rows: 1
* - Resource
- Create support
- Corresponding REST API object
* - [cluster](#cluster)
- ✓
- [Cluster object](/api/workspace/clusters/create)
* - [dashboard](#dashboard)
-
- [Dashboard object](/api/workspace/lakeview/create)
* - [experiment](#experiment)
- ✓
- [Experiment object](/api/workspace/experiments/createexperiment)
* - [job](#job)
- ✓
- [Job object](/api/workspace/jobs/create)
* - [model (legacy)](#model-legacy)
- ✓
- [Model (legacy) object](/api/workspace/modelregistry/createmodel)
* - [model_serving_endpoint](#model-serving-endpoint)
- ✓
- [Model serving endpoint object](/api/workspace/servingendpoints/create)
* - [pipeline](#pipeline)
- ✓
- [Pipeline object](/api/workspace/pipelines/create)
* - [quality_monitor](#quality-monitor)
- ✓
- [Quality monitor object](/api/workspace/qualitymonitors/create)
* - [registered_model](#registered-model) (<UC>)
- ✓
- [Registered model object](/api/workspace/registeredmodels/create)
* - [schema](#schema) (<UC>)
- ✓
- [Schema object](/api/workspace/schemas/create)
* - [volume](#volume) (<UC>)
- ✓
- [Volume object](/api/workspace/volumes/create)


@@ -0,0 +1,12 @@
package annotation
type Descriptor struct {
Description string `json:"description,omitempty"`
MarkdownDescription string `json:"markdown_description,omitempty"`
Title string `json:"title,omitempty"`
Default any `json:"default,omitempty"`
Enum []any `json:"enum,omitempty"`
MarkdownExamples string `json:"markdown_examples,omitempty"`
}
const Placeholder = "PLACEHOLDER"


@@ -0,0 +1,44 @@
package annotation
import (
"bytes"
"os"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
)
// Parsed file with annotations, expected format:
// github.com/databricks/cli/bundle/config.Bundle:
//
// cluster_id:
// description: "Description"
type File map[string]map[string]Descriptor
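// LoadAndMerge reads the given YAML files in order and merges them into a
// single File; entries from later files take precedence over earlier ones.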
func LoadAndMerge(sources []string) (File, error) {
prev := dyn.NilValue
for _, path := range sources {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
generated, err := yamlloader.LoadYAML(path, bytes.NewBuffer(b))
if err != nil {
return nil, err
}
prev, err = merge.Merge(prev, generated)
if err != nil {
return nil, err
}
}
var data File
err := convert.ToTyped(&data, prev)
if err != nil {
return nil, err
}
return data, nil
}


@@ -11,6 +11,7 @@ import (
yaml3 "gopkg.in/yaml.v3"
"github.com/databricks/cli/bundle/internal/annotation"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
@@ -19,60 +20,23 @@ import (
"github.com/databricks/cli/libs/jsonschema"
)
type annotation struct {
Description string `json:"description,omitempty"`
MarkdownDescription string `json:"markdown_description,omitempty"`
Title string `json:"title,omitempty"`
Default any `json:"default,omitempty"`
Enum []any `json:"enum,omitempty"`
}
type annotationHandler struct {
// Annotations read from all annotation files including all overrides
parsedAnnotations annotationFile
parsedAnnotations annotation.File
// Missing annotations for fields that are found in config that need to be added to the annotation file
missingAnnotations annotationFile
missingAnnotations annotation.File
}
/**
* Parsed file with annotations, expected format:
* github.com/databricks/cli/bundle/config.Bundle:
* cluster_id:
* description: "Description"
*/
type annotationFile map[string]map[string]annotation
const Placeholder = "PLACEHOLDER"
// Adds annotations to the JSON schema reading from the annotation files.
// More details https://json-schema.org/understanding-json-schema/reference/annotations
func newAnnotationHandler(sources []string) (*annotationHandler, error) {
prev := dyn.NilValue
for _, path := range sources {
b, err := os.ReadFile(path)
data, err := annotation.LoadAndMerge(sources)
if err != nil {
return nil, err
}
generated, err := yamlloader.LoadYAML(path, bytes.NewBuffer(b))
if err != nil {
return nil, err
}
prev, err = merge.Merge(prev, generated)
if err != nil {
return nil, err
}
}
var data annotationFile
err := convert.ToTyped(&data, prev)
if err != nil {
return nil, err
}
d := &annotationHandler{}
d.parsedAnnotations = data
d.missingAnnotations = annotationFile{}
d.missingAnnotations = annotation.File{}
return d, nil
}
@@ -85,7 +49,7 @@ func (d *annotationHandler) addAnnotations(typ reflect.Type, s jsonschema.Schema
annotations := d.parsedAnnotations[refPath]
if annotations == nil {
annotations = map[string]annotation{}
annotations = map[string]annotation.Descriptor{}
}
rootTypeAnnotation, ok := annotations[RootTypeKey]
@@ -96,11 +60,11 @@ func (d *annotationHandler) addAnnotations(typ reflect.Type, s jsonschema.Schema
for k, v := range s.Properties {
item := annotations[k]
if item.Description == "" {
item.Description = Placeholder
item.Description = annotation.Placeholder
emptyAnnotations := d.missingAnnotations[refPath]
if emptyAnnotations == nil {
emptyAnnotations = map[string]annotation{}
emptyAnnotations = map[string]annotation.Descriptor{}
d.missingAnnotations[refPath] = emptyAnnotations
}
emptyAnnotations[k] = item
@@ -124,7 +88,7 @@ func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error
for k := range d.missingAnnotations {
if !isCliPath(k) {
delete(d.missingAnnotations, k)
fmt.Printf("Missing annotations for `%s` that are not in CLI package, try to fetch latest OpenAPI spec and regenerate annotations", k)
fmt.Printf("Missing annotations for `%s` that are not in CLI package, try to fetch latest OpenAPI spec and regenerate annotations\n", k)
}
}
@@ -138,7 +102,7 @@ func (d *annotationHandler) syncWithMissingAnnotations(outputPath string) error
return err
}
var outputTyped annotationFile
var outputTyped annotation.File
err = convert.ToTyped(&outputTyped, output)
if err != nil {
return err
@@ -155,8 +119,8 @@ func getPath(typ reflect.Type) string {
return typ.PkgPath() + "." + typ.Name()
}
func assignAnnotation(s *jsonschema.Schema, a annotation) {
if a.Description != Placeholder {
func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
if a.Description != annotation.Placeholder {
s.Description = a.Description
}
@@ -168,7 +132,7 @@ func assignAnnotation(s *jsonschema.Schema, a annotation) {
s.Enum = a.Enum
}
func saveYamlWithStyle(outputPath string, annotations annotationFile) error {
func saveYamlWithStyle(outputPath string, annotations annotation.File) error {
annotationOrder := yamlsaver.NewOrder([]string{"description", "markdown_description", "title", "default", "enum"})
style := map[string]yaml3.Style{}
@@ -220,15 +184,17 @@ func convertLinksToAbsoluteUrl(s string) string {
referencePage := "/dev-tools/bundles/reference.html"
// Regular expression to match Markdown-style links like [_](link)
re := regexp.MustCompile(`\[_\]\(([^)]+)\)`)
re := regexp.MustCompile(`\[(.*?)\]\((.*?)\)`)
result := re.ReplaceAllStringFunc(s, func(match string) string {
matches := re.FindStringSubmatch(match)
if len(matches) < 2 {
return match
}
link := matches[1]
var text, absoluteURL string
originalText := matches[1]
link := matches[2]
var text, absoluteURL string
if strings.HasPrefix(link, "#") {
text = strings.TrimPrefix(link, "#")
absoluteURL = fmt.Sprintf("%s%s%s", base, referencePage, link)
@@ -246,6 +212,10 @@ func convertLinksToAbsoluteUrl(s string) string {
return match
}
if originalText != "_" {
text = originalText
}
return fmt.Sprintf("[%s](%s)", text, absoluteURL)
})
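For context, a sketch of the resulting rewrites (illustrative; assumes `base` points at the published docs root, and that the branch elided from this hunk maps `.md` paths to their `.html` URLs):

```go
// [_](#cluster_id)            -> [cluster_id](<base>/dev-tools/bundles/reference.html#cluster_id)
// [custom text](#cluster_id)  -> [custom text](<base>/dev-tools/bundles/reference.html#cluster_id)
```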


@@ -1,31 +1,25 @@
github.com/databricks/cli/bundle/config.Artifact:
"build":
"description": |-
An optional set of non-default build commands that you want to run locally before deployment.
For Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment.
To specify multiple build commands, separate each command with double-ampersand (&&) characters.
An optional set of non-default build commands to run locally before deployment.
"executable":
"description": |-
The executable type.
The executable type. Valid values are `bash`, `sh`, and `cmd`.
"files":
"description": |-
The source files for the artifact.
"markdown_description": |-
The source files for the artifact, defined as an [_](#artifact_file).
"path":
"description": |-
The location where the built artifact will be saved.
"type":
"description": |-
The type of the artifact.
Required. The type of the artifact.
"markdown_description": |-
The type of the artifact. Valid values are `wheel` or `jar`
Required. The type of the artifact. Valid values are `whl`.
github.com/databricks/cli/bundle/config.ArtifactFile:
"source":
"description": |-
The path of the files used to build the artifact.
Required. The path of the files used to build the artifact.
github.com/databricks/cli/bundle/config.Bundle:
"cluster_id":
"description": |-
@@ -44,12 +38,12 @@ github.com/databricks/cli/bundle/config.Bundle:
"description": |-
The definition of the bundle deployment
"markdown_description": |-
The definition of the bundle deployment. For supported attributes, see [_](#deployment) and [_](/dev-tools/bundles/deployment-modes.md).
The definition of the bundle deployment. For supported attributes, see [_](/dev-tools/bundles/deployment-modes.md).
"git":
"description": |-
The Git version control details that are associated with your bundle.
"markdown_description": |-
The Git version control details that are associated with your bundle. For supported attributes, see [_](#git) and [_](/dev-tools/bundles/settings.md#git).
The Git version control details that are associated with your bundle. For supported attributes, see [_](/dev-tools/bundles/settings.md#git).
"name":
"description": |-
The name of the bundle.
@@ -63,8 +57,6 @@ github.com/databricks/cli/bundle/config.Deployment:
"lock":
"description": |-
The deployment lock attributes.
"markdown_description": |-
The deployment lock attributes. See [_](#lock).
github.com/databricks/cli/bundle/config.Experimental:
"pydabs":
"description": |-
@@ -74,13 +66,13 @@ github.com/databricks/cli/bundle/config.Experimental:
Configures loading of Python code defined with 'databricks-bundles' package.
"python_wheel_wrapper":
"description": |-
Whether to use a Python wheel wrapper
Whether to use a Python wheel wrapper.
"scripts":
"description": |-
The commands to run
The commands to run.
"use_legacy_run_as":
"description": |-
Whether to use the legacy run_as behavior
Whether to use the legacy run_as behavior.
github.com/databricks/cli/bundle/config.Git:
"branch":
"description": |-
@@ -152,66 +144,80 @@ github.com/databricks/cli/bundle/config.Resources:
PLACEHOLDER
"clusters":
"description": |-
The cluster definitions for the bundle.
The cluster definitions for the bundle, where each key is the name of a cluster.
"markdown_description": |-
The cluster definitions for the bundle. See [_](/dev-tools/bundles/resources.md#cluster)
The cluster definitions for the bundle, where each key is the name of a cluster. See [_](/dev-tools/bundles/resources.md#clusters)
"dashboards":
"description": |-
The dashboard definitions for the bundle.
The dashboard definitions for the bundle, where each key is the name of the dashboard.
"markdown_description": |-
The dashboard definitions for the bundle. See [_](/dev-tools/bundles/resources.md#dashboard)
The dashboard definitions for the bundle, where each key is the name of the dashboard. See [_](/dev-tools/bundles/resources.md#dashboards)
"experiments":
"description": |-
The experiment definitions for the bundle.
The experiment definitions for the bundle, where each key is the name of the experiment.
"markdown_description": |-
The experiment definitions for the bundle. See [_](/dev-tools/bundles/resources.md#experiment)
The experiment definitions for the bundle, where each key is the name of the experiment. See [_](/dev-tools/bundles/resources.md#experiments)
"jobs":
"description": |-
The job definitions for the bundle.
The job definitions for the bundle, where each key is the name of the job.
"markdown_description": |-
The job definitions for the bundle. See [_](/dev-tools/bundles/resources.md#job)
The job definitions for the bundle, where each key is the name of the job. See [_](/dev-tools/bundles/resources.md#jobs)
"model_serving_endpoints":
"description": |-
The model serving endpoint definitions for the bundle.
The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint.
"markdown_description": |-
The model serving endpoint definitions for the bundle. See [_](/dev-tools/bundles/resources.md#model_serving_endpoint)
The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [_](/dev-tools/bundles/resources.md#model_serving_endpoints)
"models":
"description": |-
The model definitions for the bundle.
The model definitions for the bundle, where each key is the name of the model.
"markdown_description": |-
The model definitions for the bundle. See [_](/dev-tools/bundles/resources.md#model)
The model definitions for the bundle, where each key is the name of the model. See [_](/dev-tools/bundles/resources.md#models)
"pipelines":
"description": |-
The pipeline definitions for the bundle.
The pipeline definitions for the bundle, where each key is the name of the pipeline.
"markdown_description": |-
The pipeline definitions for the bundle. See [_](/dev-tools/bundles/resources.md#pipeline)
The pipeline definitions for the bundle, where each key is the name of the pipeline. See [_](/dev-tools/bundles/resources.md#pipelines)
"quality_monitors":
"description": |-
The quality monitor definitions for the bundle.
The quality monitor definitions for the bundle, where each key is the name of the quality monitor.
"markdown_description": |-
The quality monitor definitions for the bundle. See [_](/dev-tools/bundles/resources.md#quality_monitor)
The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [_](/dev-tools/bundles/resources.md#quality_monitors)
"registered_models":
"description": |-
The registered model definitions for the bundle.
The registered model definitions for the bundle, where each key is the name of the <UC> registered model.
"markdown_description": |-
The registered model definitions for the bundle. See [_](/dev-tools/bundles/resources.md#registered_model)
The registered model definitions for the bundle, where each key is the name of the <UC> registered model. See [_](/dev-tools/bundles/resources.md#registered_models)
"schemas":
"description": |-
The schema definitions for the bundle.
The schema definitions for the bundle, where each key is the name of the schema.
"markdown_description": |-
The schema definitions for the bundle. See [_](/dev-tools/bundles/resources.md#schema)
The schema definitions for the bundle, where each key is the name of the schema. See [_](/dev-tools/bundles/resources.md#schemas)
"volumes":
"description": |-
PLACEHOLDER
The volume definitions for the bundle, where each key is the name of the volume.
"markdown_description": |-
The volume definitions for the bundle, where each key is the name of the volume. See [_](/dev-tools/bundles/resources.md#volumes)
github.com/databricks/cli/bundle/config.Root:
"artifacts":
"description": |-
Defines the attributes to build an artifact
"markdown_description": |-
Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [_](/dev-tools/bundles/settings.md#artifacts).
Artifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [_](/dev-tools/bundles/artifact-overrides.md).
"markdown_examples": |-
```yaml
artifacts:
default:
type: whl
build: poetry build
path: .
```
"bundle":
"description": |-
The attributes of the bundle.
The bundle attributes when deploying to this target.
"markdown_description": |-
The attributes of the bundle. See [_](/dev-tools/bundles/settings.md#bundle)
The bundle attributes when deploying to this target.
"experimental":
"description": |-
Defines attributes for experimental features.
@@ -222,9 +228,21 @@ github.com/databricks/cli/bundle/config.Root:
Specifies a list of path globs that contain configuration files to include within the bundle. See [_](/dev-tools/bundles/settings.md#include)
"permissions":
"description": |-
Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle
Defines a permission for a specific entity.
"markdown_description": |-
Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle. See [_](/dev-tools/bundles/settings.md#permissions) and [_](/dev-tools/bundles/permissions.md).
A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.
See [_](/dev-tools/bundles/settings.md#permissions) and [_](/dev-tools/bundles/permissions.md).
"markdown_examples": |-
```yaml
permissions:
- level: CAN_VIEW
group_name: test-group
- level: CAN_MANAGE
user_name: someone@example.com
- level: CAN_RUN
service_principal_name: 123456-abcdef
```
"presets":
"description": |-
Defines bundle deployment presets.
@@ -232,26 +250,39 @@ github.com/databricks/cli/bundle/config.Root:
Defines bundle deployment presets. See [_](/dev-tools/bundles/deployment-modes.md#presets).
"resources":
"description": |-
Specifies information about the Databricks resources used by the bundle
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.
"markdown_description": |-
Specifies information about the Databricks resources used by the bundle. See [_](/dev-tools/bundles/resources.md).
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about <DABS> supported resources, and resource definition reference, see [_](/dev-tools/bundles/resources.md).
```yaml
resources:
<resource-type>:
<resource-name>:
<resource-field-name>: <resource-field-value>
```
"run_as":
"description": |-
The identity to use to run the bundle.
The identity to use when running <DABS> workflows.
"markdown_description": |-
The identity to use when running <DABS> workflows. See [_](/dev-tools/bundles/run-as.md).
"sync":
"description": |-
The files and file paths to include or exclude in the bundle.
"markdown_description": |-
The files and file paths to include or exclude in the bundle. See [_](/dev-tools/bundles/)
The files and file paths to include or exclude in the bundle. See [_](/dev-tools/bundles/settings.md#sync).
"targets":
"description": |-
Defines deployment targets for the bundle.
"markdown_description": |-
Defines deployment targets for the bundle. See [_](/dev-tools/bundles/settings.md#targets)
"variables":
"description": |-
A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.
"workspace":
"description": |-
Defines the Databricks workspace for the bundle.
"markdown_description": |-
Defines the Databricks workspace for the bundle. See [_](/dev-tools/bundles/settings.md#workspace).
github.com/databricks/cli/bundle/config.Sync:
"exclude":
"description": |-
@@ -266,11 +297,9 @@ github.com/databricks/cli/bundle/config.Target:
"artifacts":
"description": |-
The artifacts to include in the target deployment.
"markdown_description": |-
The artifacts to include in the target deployment. See [_](#artifact)
"bundle":
"description": |-
The name of the bundle when deploying to this target.
The bundle attributes when deploying to this target.
"cluster_id":
"description": |-
The ID of the cluster to use for this target.
@@ -283,8 +312,6 @@ github.com/databricks/cli/bundle/config.Target:
"git":
"description": |-
The Git version control settings for the target.
"markdown_description": |-
The Git version control settings for the target. See [_](#git).
"mode":
"description": |-
The deployment mode for the target.
@@ -293,38 +320,26 @@ github.com/databricks/cli/bundle/config.Target:
"permissions":
"description": |-
The permissions for deploying and running the bundle in the target.
"markdown_description": |-
The permissions for deploying and running the bundle in the target. See [_](#permission).
"presets":
"description": |-
The deployment presets for the target.
"markdown_description": |-
The deployment presets for the target. See [_](#preset).
"resources":
"description": |-
The resource definitions for the target.
"markdown_description": |-
The resource definitions for the target. See [_](#resources).
"run_as":
"description": |-
The identity to use to run the bundle.
"markdown_description": |-
The identity to use to run the bundle. See [_](#job_run_as) and [_](/dev-tools/bundles/run_as.md).
The identity to use to run the bundle. See [_](/dev-tools/bundles/run-as.md).
"sync":
"description": |-
The local paths to sync to the target workspace when a bundle is run or deployed.
"markdown_description": |-
The local paths to sync to the target workspace when a bundle is run or deployed. See [_](#sync).
"variables":
"description": |-
The custom variable definitions for the target.
"markdown_description": |-
The custom variable definitions for the target. See [_](/dev-tools/bundles/settings.md#variables) and [_](/dev-tools/bundles/variables.md).
"workspace":
"description": |-
The Databricks workspace for the target.
"markdown_description": |-
The Databricks workspace for the target. [_](#workspace)
github.com/databricks/cli/bundle/config.Workspace:
"artifact_path":
"description": |-
@@ -374,64 +389,6 @@ github.com/databricks/cli/bundle/config.Workspace:
"state_path":
"description": |-
The workspace state path
github.com/databricks/cli/bundle/config/resources.App:
"active_deployment":
"description": |-
PLACEHOLDER
"app_status":
"description": |-
PLACEHOLDER
"compute_status":
"description": |-
PLACEHOLDER
"config":
"description": |-
PLACEHOLDER
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"default_source_code_path":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"pending_deployment":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"resources":
"description": |-
PLACEHOLDER
"service_principal_client_id":
"description": |-
PLACEHOLDER
"service_principal_id":
"description": |-
PLACEHOLDER
"service_principal_name":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
"updater":
"description": |-
PLACEHOLDER
"url":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Grant:
"principal":
"description": |-
@@ -440,6 +397,11 @@ github.com/databricks/cli/bundle/config/resources.Grant:
"description": |-
The privileges to grant to the specified entity
github.com/databricks/cli/bundle/config/resources.Permission:
"-":
"description": |-
Defines a permission for a specific entity.
"markdown_description": |-
Defines a permission for a specific entity. See [_](/dev-tools/bundles/settings.md#permissions) and [_](/dev-tools/bundles/permissions.md).
"group_name":
"description": |-
The name of the group that has the permission set in level.
@@ -506,6 +468,11 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable:
"description": |-
The type of the variable.
github.com/databricks/cli/bundle/config/variable.Variable:
"_":
"description": |-
Defines a custom variable for the bundle.
"markdown_description": |-
Defines a custom variable for the bundle. See [_](/dev-tools/bundles/settings.md#variables).
"default":
"description": |-
PLACEHOLDER
@@ -516,107 +483,14 @@ github.com/databricks/cli/bundle/config/variable.Variable:
"description": |-
The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
"markdown_description": |-
The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID."
The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.
"type":
"description": |-
The type of the variable.
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResource:
"description":
"description": |-
PLACEHOLDER
"job":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"secret":
"description": |-
PLACEHOLDER
"serving_endpoint":
"description": |-
PLACEHOLDER
"sql_warehouse":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"name":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs:
"service_principal_name":
"description": |-
The application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.
"user_name":
"description": |-
The email of an active workspace user. Non-admin users can only set this field to their own email.


@@ -1,4 +1,7 @@
github.com/databricks/cli/bundle/config/resources.App:
"active_deployment":
"description": |-
PLACEHOLDER
"app_status":
"description": |-
PLACEHOLDER
@@ -8,9 +11,30 @@ github.com/databricks/cli/bundle/config/resources.App:
"config":
"description": |-
PLACEHOLDER
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"default_source_code_path":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"pending_deployment":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"resources":
"description": |-
PLACEHOLDER
"service_principal_client_id":
"description": |-
PLACEHOLDER
@@ -23,7 +47,46 @@ github.com/databricks/cli/bundle/config/resources.App:
"source_code_path":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
"updater":
"description": |-
PLACEHOLDER
"url":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Cluster:
"_":
"markdown_description": |-
The cluster resource defines an [all-purpose cluster](/api/workspace/clusters/create).
"markdown_examples": |-
The following example creates a cluster named `my_cluster` and sets that as the cluster to use to run the notebook in `my_job`:
```yaml
bundle:
name: clusters
resources:
clusters:
my_cluster:
num_workers: 2
node_type_id: "i3.xlarge"
autoscale:
min_workers: 2
max_workers: 7
spark_version: "13.3.x-scala2.12"
spark_conf:
"spark.executor.memory": "2g"
jobs:
my_job:
tasks:
- task_key: test_task
notebook_task:
notebook_path: "./src/my_notebook.py"
```
"data_security_mode":
"description": |-
PLACEHOLDER
@@ -43,6 +106,24 @@ github.com/databricks/cli/bundle/config/resources.Cluster:
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Dashboard:
"_":
"markdown_description": |-
The dashboard resource allows you to manage [AI/BI dashboards](/api/workspace/lakeview/create) in a bundle. For information about AI/BI dashboards, see [_](/dashboards/index.md).
"markdown_examples": |-
The following example includes and deploys the sample __NYC Taxi Trip Analysis__ dashboard to the Databricks workspace.
``` yaml
resources:
dashboards:
nyc_taxi_trip_analysis:
display_name: "NYC Taxi Trip Analysis"
file_path: ../src/nyc_taxi_trip_analysis.lvdash.json
warehouse_id: ${var.warehouse_id}
```
If you use the UI to modify the dashboard, modifications made through the UI are not applied to the dashboard JSON file in the local bundle unless you explicitly update it using `bundle generate`. You can use the `--watch` option to continuously poll and retrieve changes to the dashboard. See [_](/dev-tools/cli/bundle-commands.md#generate).
In addition, if you attempt to deploy a bundle that contains a dashboard JSON file that is different than the one in the remote workspace, an error will occur. To force the deploy and overwrite the dashboard in the remote workspace with the local one, use the `--force` option. See [_](/dev-tools/cli/bundle-commands.md#deploy).
"embed_credentials":
"description": |-
PLACEHOLDER
@@ -53,6 +134,24 @@ github.com/databricks/cli/bundle/config/resources.Dashboard:
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Job:
"_":
"markdown_description": |-
The job resource allows you to define [jobs and their corresponding tasks](/api/workspace/jobs/create) in your bundle. For information about jobs, see [_](/jobs/index.md). For a tutorial that uses a <DABS> template to create a job, see [_](/dev-tools/bundles/jobs-tutorial.md).
"markdown_examples": |-
The following example defines a job with the resource key `hello-job` with one notebook task:
```yaml
resources:
jobs:
hello-job:
name: hello-job
tasks:
- task_key: hello-task
notebook_task:
notebook_path: ./hello.py
```
For information about defining job tasks and overriding job settings, see [_](/dev-tools/bundles/job-task-types.md), [_](/dev-tools/bundles/job-task-override.md), and [_](/dev-tools/bundles/cluster-override.md).
"health":
"description": |-
PLACEHOLDER
@@ -63,30 +162,186 @@ github.com/databricks/cli/bundle/config/resources.Job:
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowExperiment:
"_":
"markdown_description": |-
The experiment resource allows you to define [MLflow experiments](/api/workspace/experiments/createexperiment) in a bundle. For information about MLflow experiments, see [_](/mlflow/experiments.md).
"markdown_examples": |-
The following example defines an experiment that all users can view:
```yaml
resources:
experiments:
experiment:
name: my_ml_experiment
permissions:
- level: CAN_READ
group_name: users
description: MLflow experiment used to track runs
```
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowModel:
"_":
"markdown_description": |-
The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use <UC> [registered models](#registered-model) instead.
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
"_":
"markdown_description": |-
The model_serving_endpoint resource allows you to define [model serving endpoints](/api/workspace/servingendpoints/create). See [_](/machine-learning/model-serving/manage-serving-endpoints.md).
"markdown_examples": |-
The following example defines a <UC> model serving endpoint:
```yaml
resources:
model_serving_endpoints:
uc_model_serving_endpoint:
name: "uc-model-endpoint"
config:
served_entities:
- entity_name: "myCatalog.mySchema.my-ads-model"
entity_version: "10"
workload_size: "Small"
scale_to_zero_enabled: "true"
traffic_config:
routes:
- served_model_name: "my-ads-model-10"
traffic_percentage: "100"
tags:
- key: "team"
value: "data science"
```
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Pipeline:
"_":
"markdown_description": |-
The pipeline resource allows you to create <DLT> [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/delta-live-tables/index.md). For a tutorial that uses the <DABS> template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
"markdown_examples": |-
The following example defines a pipeline with the resource key `hello-pipeline`:
```yaml
resources:
pipelines:
hello-pipeline:
name: hello-pipeline
clusters:
- label: default
num_workers: 1
development: true
continuous: false
channel: CURRENT
edition: CORE
photon: false
libraries:
- notebook:
path: ./pipeline.py
```
"permissions":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.QualityMonitor:
"_":
"markdown_description": |-
The quality_monitor resource allows you to define a <UC> [table monitor](/api/workspace/qualitymonitors/create). For information about monitors, see [_](/machine-learning/model-serving/monitor-diagnose-endpoints.md).
"markdown_examples": |-
The following example defines a quality monitor:
```yaml
resources:
quality_monitors:
my_quality_monitor:
table_name: dev.mlops_schema.predictions
output_schema_name: ${bundle.target}.mlops_schema
assets_dir: /Users/${workspace.current_user.userName}/databricks_lakehouse_monitoring
inference_log:
granularities: [1 day]
model_id_col: model_id
prediction_col: prediction
label_col: price
problem_type: PROBLEM_TYPE_REGRESSION
timestamp_col: timestamp
schedule:
quartz_cron_expression: 0 0 8 * * ? # Run every day at 8 AM
timezone_id: UTC
```
"table_name":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.RegisteredModel:
"_":
"markdown_description": |-
The registered model resource allows you to define models in <UC>. For information about <UC> [registered models](/api/workspace/registeredmodels/create), see [_](/machine-learning/manage-model-lifecycle/index.md).
"markdown_examples": |-
The following example defines a registered model in <UC>:
```yaml
resources:
registered_models:
model:
name: my_model
catalog_name: ${bundle.target}
schema_name: mlops_schema
comment: Registered model in Unity Catalog for ${bundle.target} deployment target
grants:
- privileges:
- EXECUTE
principal: account users
```
"grants":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Schema:
"_":
"markdown_description": |-
The schema resource type allows you to define <UC> [schemas](/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:
- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.
- Only fields supported by the corresponding [Schemas object create API](/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](/api/workspace/schemas/update).
"markdown_examples": |-
The following example defines a pipeline with the resource key `my_pipeline` that creates a <UC> schema with the key `my_schema` as the target:
```yaml
resources:
pipelines:
my_pipeline:
name: test-pipeline-{{.unique_id}}
libraries:
- notebook:
path: ./nb.sql
development: true
catalog: main
target: ${resources.schemas.my_schema.id}
schemas:
my_schema:
name: test-schema-{{.unique_id}}
catalog_name: main
comment: This schema was created by DABs.
```
A top-level grants mapping is not supported by <DABS>, so if you want to set grants for a schema, define the grants for the schema within the `schemas` mapping. For more information about grants, see [_](/data-governance/unity-catalog/manage-privileges/index.md#grant).
The following example defines a <UC> schema with grants:
```yaml
resources:
schemas:
my_schema:
name: test-schema
grants:
- principal: users
privileges:
- CAN_MANAGE
- principal: my_team
privileges:
- CAN_READ
catalog_name: main
```
"grants":
"description": |-
PLACEHOLDER
@ -94,6 +349,27 @@ github.com/databricks/cli/bundle/config/resources.Schema:
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Volume:
"_":
"markdown_description": |-
The volume resource type allows you to define and create <UC> [volumes](/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:
- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use <DABS> to create the volume, you must first define the volume in the bundle, deploy the bundle to create the volume, and then reference it in the `artifact_path` in subsequent deployments (a sketch of this follows the example below).
- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [_](/dev-tools/bundles/deployment-modes.md#custom-presets).
"markdown_examples": |-
The following example creates a <UC> volume with the key `my_volume`:
```yaml
resources:
volumes:
my_volume:
catalog_name: main
name: my_volume
schema_name: my_schema
```
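A subsequent deployment can then reference the volume in the `artifact_path`. The following is a minimal sketch that assumes the catalog, schema, and volume from the previous example:
```yaml
workspace:
  artifact_path: /Volumes/main/my_schema/my_volume/artifacts
```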
For an example bundle that runs a job that writes to a file in a <UC> volume, see the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples/tree/main/knowledge_base/write_from_job_to_volume).
"grants":
"description": |-
PLACEHOLDER
@ -197,6 +473,85 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
"manual":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"name":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state":
github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
"entity_version":
"description": |-
View File
@ -33,6 +33,10 @@ func TestConvertLinksToAbsoluteUrl(t *testing.T) {
input: "This is a link to [external](https://external.com)",
expected: "This is a link to [external](https://external.com)",
},
{
input: "This is a link to [one](/relative), [two](/relative-2)",
expected: "This is a link to [one](https://docs.databricks.com/relative), [two](https://docs.databricks.com/relative-2)",
},
}
for _, test := range tests {
View File
@ -10,6 +10,7 @@ import (
"testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/internal/annotation"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader"
@ -113,13 +114,13 @@ func TestNoDetachedAnnotations(t *testing.T) {
assert.Empty(t, types, "Detached annotations found, regenerate schema and check for package path changes")
}
func getAnnotations(path string) (annotationFile, error) {
func getAnnotations(path string) (annotation.File, error) {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
var data annotationFile
var data annotation.File
err = yaml.Unmarshal(b, &data)
return data, err
}
View File
@ -8,6 +8,7 @@ import (
"reflect"
"strings"
"github.com/databricks/cli/bundle/internal/annotation"
"github.com/databricks/cli/libs/jsonschema"
"gopkg.in/yaml.v3"
)
@ -114,8 +115,8 @@ func mapIncorrectTypNames(ref string) string {
// Use the OpenAPI spec to load descriptions for the given type.
func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overridesPath string) error {
annotations := annotationFile{}
overrides := annotationFile{}
annotations := annotation.File{}
overrides := annotation.File{}
b, err := os.ReadFile(overridesPath)
if err != nil {
@ -126,7 +127,7 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid
return err
}
if overrides == nil {
overrides = annotationFile{}
overrides = annotation.File{}
}
_, err = jsonschema.FromType(typ, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
@ -137,16 +138,16 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid
}
basePath := getPath(typ)
pkg := map[string]annotation{}
pkg := map[string]annotation.Descriptor{}
annotations[basePath] = pkg
if ref.Description != "" || ref.Enum != nil {
pkg[RootTypeKey] = annotation{Description: ref.Description, Enum: ref.Enum}
pkg[RootTypeKey] = annotation.Descriptor{Description: ref.Description, Enum: ref.Enum}
}
for k := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
pkg[k] = annotation{Description: refProp.Description, Enum: refProp.Enum}
pkg[k] = annotation.Descriptor{Description: refProp.Description, Enum: refProp.Enum}
if refProp.Description == "" {
addEmptyOverride(k, basePath, overrides)
}
@ -195,22 +196,22 @@ func prependCommentToFile(outputPath, comment string) error {
return err
}
func addEmptyOverride(key, pkg string, overridesFile annotationFile) {
func addEmptyOverride(key, pkg string, overridesFile annotation.File) {
if overridesFile[pkg] == nil {
overridesFile[pkg] = map[string]annotation{}
overridesFile[pkg] = map[string]annotation.Descriptor{}
}
overrides := overridesFile[pkg]
if overrides[key].Description == "" {
overrides[key] = annotation{Description: Placeholder}
overrides[key] = annotation.Descriptor{Description: annotation.Placeholder}
}
a, ok := overrides[key]
if !ok {
a = annotation{}
a = annotation.Descriptor{}
}
if a.Description == "" {
a.Description = Placeholder
a.Description = annotation.Placeholder
}
overrides[key] = a
}
View File
@ -258,7 +258,8 @@
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The cluster resource defines an [all-purpose cluster](https://docs.databricks.com/api/workspace/clusters/create)."
},
{
"type": "string",
@ -321,7 +322,8 @@
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The dashboard resource allows you to manage [AI/BI dashboards](https://docs.databricks.com/api/workspace/lakeview/create) in a bundle. For information about AI/BI dashboards, see [link](https://docs.databricks.com/dashboards/index.html)."
},
{
"type": "string",
@ -442,7 +444,8 @@
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a \u003cDABS\u003e template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
},
{
"type": "string",
@ -487,7 +490,8 @@
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The experiment resource allows you to define [MLflow experiments](https://docs.databricks.com/api/workspace/experiments/createexperiment) in a bundle. For information about MLflow experiments, see [link](https://docs.databricks.com/mlflow/experiments.html)."
},
{
"type": "string",
@ -532,7 +536,8 @@
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use \u003cUC\u003e [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
},
{
"type": "string",
@ -576,7 +581,8 @@
"additionalProperties": false,
"required": [
"name"
]
],
"markdownDescription": "The model_serving_endpoint resource allows you to define [model serving endpoints](https://docs.databricks.com/api/workspace/servingendpoints/create). See [link](https://docs.databricks.com/machine-learning/model-serving/manage-serving-endpoints.html)."
},
{
"type": "string",
@ -718,7 +724,8 @@
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "The pipeline resource allows you to create \u003cDLT\u003e [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/delta-live-tables/index.html). For a tutorial that uses the \u003cDABS\u003e template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
},
{
"type": "string",
@ -792,7 +799,8 @@
"table_name",
"assets_dir",
"output_schema_name"
]
],
"markdownDescription": "The quality_monitor resource allows you to define a \u003cUC\u003e [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
},
{
"type": "string",
@ -834,7 +842,8 @@
"catalog_name",
"name",
"schema_name"
]
],
"markdownDescription": "The registered model resource allows you to define models in \u003cUC\u003e. For information about \u003cUC\u003e [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
},
{
"type": "string",
@ -874,7 +883,8 @@
"required": [
"catalog_name",
"name"
]
],
"markdownDescription": "The schema resource type allows you to define \u003cUC\u003e [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
},
{
"type": "string",
@ -919,7 +929,8 @@
"catalog_name",
"name",
"schema_name"
]
],
"markdownDescription": "The volume resource type allows you to define and create \u003cUC\u003e [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use \u003cDABS\u003e to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
},
{
"type": "string",
@ -1005,6 +1016,7 @@
},
"variable.Variable": {
"type": "object",
"description": "Defines a custom variable for the bundle.",
"properties": {
"default": {
"$ref": "#/$defs/interface"
@ -1016,14 +1028,15 @@
"lookup": {
"description": "The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup",
"markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID.\""
"markdownDescription": "The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID."
},
"type": {
"description": "The type of the variable.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType"
}
},
"additionalProperties": false
"additionalProperties": false,
"markdownDescription": "Defines a custom variable for the bundle. See [variables](https://docs.databricks.com/dev-tools/bundles/settings.html#variables)."
},
"variable.VariableType": {
"type": "string"
@ -1035,26 +1048,25 @@
"type": "object",
"properties": {
"build": {
"description": "An optional set of non-default build commands that you want to run locally before deployment.\n\nFor Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment.\n\nTo specify multiple build commands, separate each command with double-ampersand (\u0026\u0026) characters.",
"description": "An optional set of non-default build commands to run locally before deployment.",
"$ref": "#/$defs/string"
},
"executable": {
"description": "The executable type.",
"description": "The executable type. Valid values are `bash`, `sh`, and `cmd`.",
"$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType"
},
"files": {
"description": "The source files for the artifact.",
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile",
"markdownDescription": "The source files for the artifact, defined as an [artifact_file](https://docs.databricks.com/dev-tools/bundles/reference.html#artifact_file)."
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile"
},
"path": {
"description": "The location where the built artifact will be saved.",
"$ref": "#/$defs/string"
},
"type": {
"description": "The type of the artifact.",
"description": "Required. The type of the artifact.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType",
"markdownDescription": "The type of the artifact. Valid values are `wheel` or `jar`"
"markdownDescription": "Required. The type of the artifact. Valid values are `whl`."
}
},
"additionalProperties": false,
@ -1074,7 +1086,7 @@
"type": "object",
"properties": {
"source": {
"description": "The path of the files used to build the artifact.",
"description": "Required. The path of the files used to build the artifact.",
"$ref": "#/$defs/string"
}
},
@ -1113,12 +1125,12 @@
"deployment": {
"description": "The definition of the bundle deployment",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment",
"markdownDescription": "The definition of the bundle deployment. For supported attributes, see [deployment](https://docs.databricks.com/dev-tools/bundles/reference.html#deployment) and [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html)."
"markdownDescription": "The definition of the bundle deployment. For supported attributes see [link](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html)."
},
"git": {
"description": "The Git version control details that are associated with your bundle.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git",
"markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes, see [git](https://docs.databricks.com/dev-tools/bundles/reference.html#git) and [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)."
"markdownDescription": "The Git version control details that are associated with your bundle. For supported attributes see [git](https://docs.databricks.com/dev-tools/bundles/settings.html#git)."
},
"name": {
"description": "The name of the bundle.",
@ -1154,8 +1166,7 @@
},
"lock": {
"description": "The deployment lock attributes.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock",
"markdownDescription": "The deployment lock attributes. See [lock](https://docs.databricks.com/dev-tools/bundles/reference.html#lock)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock"
}
},
"additionalProperties": false
@ -1180,15 +1191,15 @@
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Python"
},
"python_wheel_wrapper": {
"description": "Whether to use a Python wheel wrapper",
"description": "Whether to use a Python wheel wrapper.",
"$ref": "#/$defs/bool"
},
"scripts": {
"description": "The commands to run",
"description": "The commands to run.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command"
},
"use_legacy_run_as": {
"description": "Whether to use the legacy run_as behavior",
"description": "Whether to use the legacy run_as behavior.",
"$ref": "#/$defs/bool"
}
},
@ -1352,57 +1363,59 @@
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App"
},
"clusters": {
"description": "The cluster definitions for the bundle.",
"description": "The cluster definitions for the bundle, where each key is the name of a cluster.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster",
"markdownDescription": "The cluster definitions for the bundle. See [cluster](https://docs.databricks.com/dev-tools/bundles/resources.html#cluster)"
"markdownDescription": "The cluster definitions for the bundle, where each key is the name of a cluster. See [clusters](https://docs.databricks.com/dev-tools/bundles/resources.html#clusters)"
},
"dashboards": {
"description": "The dashboard definitions for the bundle.",
"description": "The dashboard definitions for the bundle, where each key is the name of the dashboard.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Dashboard",
"markdownDescription": "The dashboard definitions for the bundle. See [dashboard](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboard)"
"markdownDescription": "The dashboard definitions for the bundle, where each key is the name of the dashboard. See [dashboards](https://docs.databricks.com/dev-tools/bundles/resources.html#dashboards)"
},
"experiments": {
"description": "The experiment definitions for the bundle.",
"description": "The experiment definitions for the bundle, where each key is the name of the experiment.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment",
"markdownDescription": "The experiment definitions for the bundle. See [experiment](https://docs.databricks.com/dev-tools/bundles/resources.html#experiment)"
"markdownDescription": "The experiment definitions for the bundle, where each key is the name of the experiment. See [experiments](https://docs.databricks.com/dev-tools/bundles/resources.html#experiments)"
},
"jobs": {
"description": "The job definitions for the bundle.",
"description": "The job definitions for the bundle, where each key is the name of the job.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job",
"markdownDescription": "The job definitions for the bundle. See [job](https://docs.databricks.com/dev-tools/bundles/resources.html#job)"
"markdownDescription": "The job definitions for the bundle, where each key is the name of the job. See [jobs](https://docs.databricks.com/dev-tools/bundles/resources.html#jobs)"
},
"model_serving_endpoints": {
"description": "The model serving endpoint definitions for the bundle.",
"description": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint",
"markdownDescription": "The model serving endpoint definitions for the bundle. See [model_serving_endpoint](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoint)"
"markdownDescription": "The model serving endpoint definitions for the bundle, where each key is the name of the model serving endpoint. See [model_serving_endpoints](https://docs.databricks.com/dev-tools/bundles/resources.html#model_serving_endpoints)"
},
"models": {
"description": "The model definitions for the bundle.",
"description": "The model definitions for the bundle, where each key is the name of the model.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel",
"markdownDescription": "The model definitions for the bundle. See [model](https://docs.databricks.com/dev-tools/bundles/resources.html#model)"
"markdownDescription": "The model definitions for the bundle, where each key is the name of the model. See [models](https://docs.databricks.com/dev-tools/bundles/resources.html#models)"
},
"pipelines": {
"description": "The pipeline definitions for the bundle.",
"description": "The pipeline definitions for the bundle, where each key is the name of the pipeline.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline",
"markdownDescription": "The pipeline definitions for the bundle. See [pipeline](https://docs.databricks.com/dev-tools/bundles/resources.html#pipeline)"
"markdownDescription": "The pipeline definitions for the bundle, where each key is the name of the pipeline. See [pipelines](https://docs.databricks.com/dev-tools/bundles/resources.html#pipelines)"
},
"quality_monitors": {
"description": "The quality monitor definitions for the bundle.",
"description": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor",
"markdownDescription": "The quality monitor definitions for the bundle. See [quality_monitor](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitor)"
"markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors)"
},
"registered_models": {
"description": "The registered model definitions for the bundle.",
"description": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel",
"markdownDescription": "The registered model definitions for the bundle. See [registered_model](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_model)"
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)"
},
"schemas": {
"description": "The schema definitions for the bundle.",
"description": "The schema definitions for the bundle, where each key is the name of the schema.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema",
"markdownDescription": "The schema definitions for the bundle. See [schema](https://docs.databricks.com/dev-tools/bundles/resources.html#schema)"
"markdownDescription": "The schema definitions for the bundle, where each key is the name of the schema. See [schemas](https://docs.databricks.com/dev-tools/bundles/resources.html#schemas)"
},
"volumes": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume"
"description": "The volume definitions for the bundle, where each key is the name of the volume.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Volume",
"markdownDescription": "The volume definitions for the bundle, where each key is the name of the volume. See [volumes](https://docs.databricks.com/dev-tools/bundles/resources.html#volumes)"
}
},
"additionalProperties": false
@ -1446,11 +1459,10 @@
"properties": {
"artifacts": {
"description": "The artifacts to include in the target deployment.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact",
"markdownDescription": "The artifacts to include in the target deployment. See [artifact](https://docs.databricks.com/dev-tools/bundles/reference.html#artifact)"
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact"
},
"bundle": {
"description": "The name of the bundle when deploying to this target.",
"description": "The bundle attributes when deploying to this target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle"
},
"cluster_id": {
@ -1467,8 +1479,7 @@
},
"git": {
"description": "The Git version control settings for the target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git",
"markdownDescription": "The Git version control settings for the target. See [git](https://docs.databricks.com/dev-tools/bundles/reference.html#git)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git"
},
"mode": {
"description": "The deployment mode for the target.",
@ -1477,38 +1488,32 @@
},
"permissions": {
"description": "The permissions for deploying and running the bundle in the target.",
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission",
"markdownDescription": "The permissions for deploying and running the bundle in the target. See [permission](https://docs.databricks.com/dev-tools/bundles/reference.html#permission)."
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"presets": {
"description": "The deployment presets for the target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets",
"markdownDescription": "The deployment presets for the target. See [preset](https://docs.databricks.com/dev-tools/bundles/reference.html#preset)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets"
},
"resources": {
"description": "The resource definitions for the target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
"markdownDescription": "The resource definitions for the target. See [resources](https://docs.databricks.com/dev-tools/bundles/reference.html#resources)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources"
},
"run_as": {
"description": "The identity to use to run the bundle.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
"markdownDescription": "The identity to use to run the bundle. See [job_run_as](https://docs.databricks.com/dev-tools/bundles/reference.html#job_run_as) and [link](https://docs.databricks.com/dev-tools/bundles/run_as.html)."
"markdownDescription": "The identity to use to run the bundle, see [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
},
"sync": {
"description": "The local paths to sync to the target workspace when a bundle is run or deployed.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync",
"markdownDescription": "The local paths to sync to the target workspace when a bundle is run or deployed. See [sync](https://docs.databricks.com/dev-tools/bundles/reference.html#sync)."
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync"
},
"variables": {
"description": "The custom variable definitions for the target.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable",
"markdownDescription": "The custom variable definitions for the target. See [variables](https://docs.databricks.com/dev-tools/bundles/settings.html#variables) and [link](https://docs.databricks.com/dev-tools/bundles/variables.html)."
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable"
},
"workspace": {
"description": "The Databricks workspace for the target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace",
"markdownDescription": "The Databricks workspace for the target. [workspace](https://docs.databricks.com/dev-tools/bundles/reference.html#workspace)"
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace"
}
},
"additionalProperties": false
@ -1719,12 +1724,14 @@
"type": "object",
"properties": {
"description": {
"description": "Description of the App Resource.",
"$ref": "#/$defs/string"
},
"job": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob"
},
"name": {
"description": "Name of the App Resource.",
"$ref": "#/$defs/string"
},
"secret": {
@ -1980,6 +1987,7 @@
"$ref": "#/$defs/string"
},
"state": {
"description": "State of the app compute.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ComputeState"
}
},
@ -3701,7 +3709,7 @@
"description": "Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job.\n\nEither `user_name` or `service_principal_name` should be specified. If not, an error is thrown.",
"properties": {
"service_principal_name": {
"description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.",
"description": "The application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.",
"$ref": "#/$defs/string"
},
"user_name": {
@ -7227,12 +7235,13 @@
"properties": {
"artifacts": {
"description": "Defines the attributes to build an artifact",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact"
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact",
"markdownDescription": "Defines the attributes to build artifacts, where each key is the name of the artifact, and the value is a Map that defines the artifact build settings. For information about the `artifacts` mapping, see [artifacts](https://docs.databricks.com/dev-tools/bundles/settings.html#artifacts).\n\nArtifact settings defined in the top level of the bundle configuration can be overridden in the `targets` mapping. See [link](https://docs.databricks.com/dev-tools/bundles/artifact-overrides.html)."
},
"bundle": {
"description": "The attributes of the bundle.",
"description": "The bundle attributes when deploying to this target.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle",
"markdownDescription": "The attributes of the bundle. See [bundle](https://docs.databricks.com/dev-tools/bundles/settings.html#bundle)"
"markdownDescription": "The bundle attributes when deploying to this target,"
},
"experimental": {
"description": "Defines attributes for experimental features.",
@ -7244,9 +7253,9 @@
"markdownDescription": "Specifies a list of path globs that contain configuration files to include within the bundle. See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include)"
},
"permissions": {
"description": "Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle",
"description": "Defines a permission for a specific entity.",
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission",
"markdownDescription": "Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle. See [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html)."
"markdownDescription": "A Sequence that defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle, where each item in the sequence is a permission for a specific entity.\n\nSee [permissions](https://docs.databricks.com/dev-tools/bundles/settings.html#permissions) and [link](https://docs.databricks.com/dev-tools/bundles/permissions.html)."
},
"presets": {
"description": "Defines bundle deployment presets.",
@ -7254,22 +7263,24 @@
"markdownDescription": "Defines bundle deployment presets. See [presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#presets)."
},
"resources": {
"description": "Specifies information about the Databricks resources used by the bundle",
"description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
"markdownDescription": "Specifies information about the Databricks resources used by the bundle. See [link](https://docs.databricks.com/dev-tools/bundles/resources.html)."
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about \u003cDABS\u003e supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
},
"run_as": {
"description": "The identity to use to run the bundle.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs"
"description": "The identity to use when running \u003cDABS\u003e workflows.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
"markdownDescription": "The identity to use when running \u003cDABS\u003e workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
},
"sync": {
"description": "The files and file paths to include or exclude in the bundle.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync",
"markdownDescription": "The files and file paths to include or exclude in the bundle. See [link](https://docs.databricks.com/dev-tools/bundles/)"
"markdownDescription": "The files and file paths to include or exclude in the bundle. See [sync](https://docs.databricks.com/dev-tools/bundles/settings.html#sync)."
},
"targets": {
"description": "Defines deployment targets for the bundle.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target"
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target",
"markdownDescription": "Defines deployment targets for the bundle. See [targets](https://docs.databricks.com/dev-tools/bundles/settings.html#targets)"
},
"variables": {
"description": "A Map that defines the custom variables for the bundle, where each key is the name of the variable, and the value is a Map that defines the variable.",
@ -7277,7 +7288,8 @@
},
"workspace": {
"description": "Defines the Databricks workspace for the bundle.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace"
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace",
"markdownDescription": "Defines the Databricks workspace for the bundle. See [workspace](https://docs.databricks.com/dev-tools/bundles/settings.html#workspace)."
}
},
"additionalProperties": {}
View File
@ -111,6 +111,10 @@ func FromType(typ reflect.Type, fns []func(typ reflect.Type, s Schema) Schema) (
return res, nil
}
func TypePath(typ reflect.Type) string {
return typePath(typ)
}
// typePath computes a unique string representation of the type. $ref in the generated
// JSON schema will refer to this path. See TestTypePath for example outputs.
func typePath(typ reflect.Type) string {
View File
@ -76,6 +76,10 @@ type Schema struct {
// Title of the object, rendered as inline documentation in the IDE.
// https://json-schema.org/understanding-json-schema/reference/annotations
Title string `json:"title,omitempty"`
// Examples of the value for properties in the schema.
// https://json-schema.org/understanding-json-schema/reference/annotations
Examples []any `json:"examples,omitempty"`
}
// Default value defined in a JSON Schema, represented as a string.