Adapt docsgen output for new documentation framework (#2262)

## Changes

Update `docsgen` output to align with new documentation framework

1. New header style: placeholder names were previously wrapped in angle
brackets (`<name>`); they are now rendered in italics (`_name_`)
2. Updated broken markdown in OpenAPI descriptions
3. Table markdown has different structure in new framework

## Tests

Updated existing tests

NO_CHANGELOG=true
This commit is contained in:
Ilya Kuznetsov 2025-03-07 19:11:08 +01:00 committed by GitHub
parent af3914db61
commit 0c809db82a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 5712 additions and 5254 deletions

View File

@ -7,6 +7,7 @@ import (
"path" "path"
"reflect" "reflect"
"strings" "strings"
"time"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/internal/annotation" "github.com/databricks/cli/bundle/internal/annotation"
@ -43,7 +44,7 @@ func main() {
[]string{path.Join(annotationDir, "annotations.yml")}, []string{path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, rootFileName), path.Join(outputDir, rootFileName),
reflect.TypeOf(config.Root{}), reflect.TypeOf(config.Root{}),
string(rootHeader), fillTemplateVariables(string(rootHeader)),
) )
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
@ -56,7 +57,7 @@ func main() {
[]string{path.Join(annotationDir, "annotations_openapi.yml"), path.Join(annotationDir, "annotations_openapi_overrides.yml"), path.Join(annotationDir, "annotations.yml")}, []string{path.Join(annotationDir, "annotations_openapi.yml"), path.Join(annotationDir, "annotations_openapi_overrides.yml"), path.Join(annotationDir, "annotations.yml")},
path.Join(outputDir, resourcesFileName), path.Join(outputDir, resourcesFileName),
reflect.TypeOf(config.Resources{}), reflect.TypeOf(config.Resources{}),
string(resourcesHeader), fillTemplateVariables(string(resourcesHeader)),
) )
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
@ -133,3 +134,8 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
s.Examples = []string{a.MarkdownExamples} s.Examples = []string{a.MarkdownExamples}
} }
} }
// fillTemplateVariables expands template placeholders in s. The only
// supported placeholder is {{update_date}}, which is replaced with the
// current date formatted as YYYY-MM-DD. All other text passes through
// unchanged.
func fillTemplateVariables(s string) string {
	return strings.ReplaceAll(s, "{{update_date}}", time.Now().Format("2006-01-02"))
}

View File

@ -12,7 +12,7 @@ func buildMarkdown(nodes []rootNode, outputFile, header string) error {
m = m.PlainText(header) m = m.PlainText(header)
for _, node := range nodes { for _, node := range nodes {
m = m.LF() m = m.LF()
title := escapeBrackets(node.Title) title := node.Title
if node.TopLevel { if node.TopLevel {
m = m.H2(title) m = m.H2(title)
} else { } else {
@ -68,21 +68,24 @@ func pickLastWord(s string) string {
// Build a custom table which we use in Databricks website // Build a custom table which we use in Databricks website
func buildAttributeTable(m *markdownRenderer, attributes []attributeNode) *markdownRenderer { func buildAttributeTable(m *markdownRenderer, attributes []attributeNode) *markdownRenderer {
m = m.LF() m = m.LF()
m = m.PlainText(".. list-table::") m = m.PlainText(":::list-table")
m = m.PlainText(" :header-rows: 1")
m = m.LF() m = m.LF()
m = m.PlainText(" * - Key") m = m.PlainText("- - Key")
m = m.PlainText(" - Type") m = m.PlainText(" - Type")
m = m.PlainText(" - Description") m = m.PlainText(" - Description")
m = m.LF() m = m.LF()
for _, a := range attributes { for _, a := range attributes {
m = m.PlainText(" * - " + fmt.Sprintf("`%s`", a.Title)) m = m.PlainText("- - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type) m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a)) m = m.PlainText(" - " + formatDescription(a))
m = m.LF() m = m.LF()
} }
m = m.PlainText(":::")
m = m.LF()
return m return m
} }
@ -94,7 +97,7 @@ func formatDescription(a attributeNode) string {
} else if s != "" { } else if s != "" {
s += ". " s += ". "
} }
s += fmt.Sprintf("See [_](#%s).", cleanAnchor(a.Link)) s += fmt.Sprintf("See [\\_](#%s).", cleanAnchor(a.Link))
} }
return s return s
} }
@ -102,15 +105,7 @@ func formatDescription(a attributeNode) string {
// Docs framework does not allow special characters in anchor links and strip them out by default // Docs framework does not allow special characters in anchor links and strip them out by default
// We need to clean them up to make sure the links pass the validation // We need to clean them up to make sure the links pass the validation
func cleanAnchor(s string) string { func cleanAnchor(s string) string {
s = strings.ReplaceAll(s, "<", "")
s = strings.ReplaceAll(s, ">", "")
s = strings.ReplaceAll(s, ".", "") s = strings.ReplaceAll(s, ".", "")
s = strings.ReplaceAll(s, nameFieldWithFormat, nameField)
return s
}
func escapeBrackets(s string) string {
s = strings.ReplaceAll(s, "<", "\\<")
s = strings.ReplaceAll(s, ">", "\\>")
return s return s
} }

View File

@ -20,12 +20,12 @@ func TestBuildMarkdownAnchors(t *testing.T) {
Title: "my_attribute", Title: "my_attribute",
Type: "Map", Type: "Map",
Description: "Desc with link", Description: "Desc with link",
Link: "some_field.<name>.my_attribute", Link: "some_field._name_.my_attribute",
}, },
}, },
}, },
{ {
Title: "some_field.<name>.my_attribute", Title: "some_field._name_.my_attribute",
TopLevel: false, TopLevel: false,
Type: "Boolean", Type: "Boolean",
Description: "Another description", Description: "Another description",

View File

@ -137,8 +137,13 @@ func getMapValueType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema) *
return nil return nil
} }
const (
nameField = "name"
nameFieldWithFormat = "_name_"
)
func getMapKeyPrefix(s string) string { func getMapKeyPrefix(s string) string {
return s + ".<name>" return s + "." + nameFieldWithFormat
} }
func removePluralForm(s string) string { func removePluralForm(s string) string {

View File

@ -93,11 +93,11 @@ func TestBuildNodes_ChildExpansion(t *testing.T) {
TopLevel: true, TopLevel: true,
Type: "Map", Type: "Map",
ObjectKeyAttributes: []attributeNode{ ObjectKeyAttributes: []attributeNode{
{Title: "mapSub", Type: "Map", Link: "myMap.<name>.mapSub"}, {Title: "mapSub", Type: "Map", Link: "myMap._name_.mapSub"},
}, },
}, },
{ {
Title: "myMap.<name>.mapSub", Title: "myMap._name_.mapSub",
Type: "Map", Type: "Map",
Attributes: []attributeNode{ Attributes: []attributeNode{
{Title: "deepSub", Type: "Boolean"}, {Title: "deepSub", Type: "Boolean"},

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,7 @@
--- ---
description: 'Configuration reference for databricks.yml' description: 'Configuration reference for databricks.yml'
last_update: last_update:
date: 2025-02-14 date: {{update_date}}
--- ---
<!--DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli--> <!--DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli-->

View File

@ -1,7 +1,7 @@
--- ---
description: 'Learn about resources supported by Databricks Asset Bundles and how to configure them.' description: 'Learn about resources supported by Databricks Asset Bundles and how to configure them.'
last_update: last_update:
date: 2025-02-14 date: {{update_date}}
--- ---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli --> <!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->

View File

@ -8,19 +8,20 @@ This is a description
.. list-table:: :::list-table
:header-rows: 1
* - Key - - Key
- Type - Type
- Description - Description
* - `my_attribute` - - `my_attribute`
- Map - Map
- Desc with link. See [_](#some_fieldnamemy_attribute). - Desc with link. See [\_](#some_fieldnamemy_attribute).
:::
### some_field.\<name\>.my_attribute ### some_field._name_.my_attribute
**`Type: Boolean`** **`Type: Boolean`**

View File

@ -188,9 +188,9 @@ github.com/databricks/cli/bundle/config.Resources:
The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [_](/dev-tools/bundles/resources.md#quality_monitors). The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [_](/dev-tools/bundles/resources.md#quality_monitors).
"registered_models": "registered_models":
"description": |- "description": |-
The registered model definitions for the bundle, where each key is the name of the <UC> registered model. The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.
"markdown_description": |- "markdown_description": |-
The registered model definitions for the bundle, where each key is the name of the <UC> registered model. See [_](/dev-tools/bundles/resources.md#registered_models). The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [_](/dev-tools/bundles/resources.md#registered_models).
"schemas": "schemas":
"description": |- "description": |-
The schema definitions for the bundle, where each key is the name of the schema. The schema definitions for the bundle, where each key is the name of the schema.
@ -261,7 +261,7 @@ github.com/databricks/cli/bundle/config.Root:
"description": |- "description": |-
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.
"markdown_description": |- "markdown_description": |-
A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about <DABS> supported resources, and resource definition reference, see [_](/dev-tools/bundles/resources.md). A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [_](/dev-tools/bundles/resources.md).
```yaml ```yaml
resources: resources:
@ -271,9 +271,9 @@ github.com/databricks/cli/bundle/config.Root:
``` ```
"run_as": "run_as":
"description": |- "description": |-
The identity to use when running <DABS> workflows. The identity to use when running Databricks Asset Bundles workflows.
"markdown_description": |- "markdown_description": |-
The identity to use when running <DABS> workflows. See [_](/dev-tools/bundles/run-as.md). The identity to use when running Databricks Asset Bundles workflows. See [_](/dev-tools/bundles/run-as.md).
"sync": "sync":
"description": |- "description": |-
The files and file paths to include or exclude in the bundle. The files and file paths to include or exclude in the bundle.

View File

@ -140,7 +140,7 @@ github.com/databricks/cli/bundle/config/resources.Dashboard:
github.com/databricks/cli/bundle/config/resources.Job: github.com/databricks/cli/bundle/config/resources.Job:
"_": "_":
"markdown_description": |- "markdown_description": |-
The job resource allows you to define [jobs and their corresponding tasks](/api/workspace/jobs/create) in your bundle. For information about jobs, see [_](/jobs/index.md). For a tutorial that uses a <DABS> template to create a job, see [_](/dev-tools/bundles/jobs-tutorial.md). The job resource allows you to define [jobs and their corresponding tasks](/api/workspace/jobs/create) in your bundle. For information about jobs, see [_](/jobs/index.md). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [_](/dev-tools/bundles/jobs-tutorial.md).
"markdown_examples": |- "markdown_examples": |-
The following example defines a job with the resource key `hello-job` with one notebook task: The following example defines a job with the resource key `hello-job` with one notebook task:
@ -188,7 +188,7 @@ github.com/databricks/cli/bundle/config/resources.MlflowExperiment:
github.com/databricks/cli/bundle/config/resources.MlflowModel: github.com/databricks/cli/bundle/config/resources.MlflowModel:
"_": "_":
"markdown_description": |- "markdown_description": |-
The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use <UC> [registered models](#registered-model) instead. The model resource allows you to define [legacy models](/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](#registered-model) instead.
"permissions": "permissions":
"description": |- "description": |-
PLACEHOLDER PLACEHOLDER
@ -197,7 +197,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
"markdown_description": |- "markdown_description": |-
The model_serving_endpoint resource allows you to define [model serving endpoints](/api/workspace/servingendpoints/create). See [_](/machine-learning/model-serving/manage-serving-endpoints.md). The model_serving_endpoint resource allows you to define [model serving endpoints](/api/workspace/servingendpoints/create). See [_](/machine-learning/model-serving/manage-serving-endpoints.md).
"markdown_examples": |- "markdown_examples": |-
The following example defines a <UC> model serving endpoint: The following example defines a Unity Catalog model serving endpoint:
```yaml ```yaml
resources: resources:
@ -224,7 +224,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
github.com/databricks/cli/bundle/config/resources.Pipeline: github.com/databricks/cli/bundle/config/resources.Pipeline:
"_": "_":
"markdown_description": |- "markdown_description": |-
The pipeline resource allows you to create <DLT> [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/delta-live-tables/index.md). For a tutorial that uses the <DABS> template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md). The pipeline resource allows you to create Delta Live Tables [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/dlt/index.md). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
"markdown_examples": |- "markdown_examples": |-
The following example defines a pipeline with the resource key `hello-pipeline`: The following example defines a pipeline with the resource key `hello-pipeline`:
@ -257,7 +257,7 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
github.com/databricks/cli/bundle/config/resources.QualityMonitor: github.com/databricks/cli/bundle/config/resources.QualityMonitor:
"_": "_":
"markdown_description": |- "markdown_description": |-
The quality_monitor resource allows you to define a <UC> [table monitor](/api/workspace/qualitymonitors/create). For information about monitors, see [_](/machine-learning/model-serving/monitor-diagnose-endpoints.md). The quality_monitor resource allows you to define a Unity Catalog [table monitor](/api/workspace/qualitymonitors/create). For information about monitors, see [_](/machine-learning/model-serving/monitor-diagnose-endpoints.md).
"markdown_examples": |- "markdown_examples": |-
The following example defines a quality monitor: The following example defines a quality monitor:
@ -285,9 +285,9 @@ github.com/databricks/cli/bundle/config/resources.QualityMonitor:
github.com/databricks/cli/bundle/config/resources.RegisteredModel: github.com/databricks/cli/bundle/config/resources.RegisteredModel:
"_": "_":
"markdown_description": |- "markdown_description": |-
The registered model resource allows you to define models in <UC>. For information about <UC> [registered models](/api/workspace/registeredmodels/create), see [_](/machine-learning/manage-model-lifecycle/index.md). The registered model resource allows you to define models in Unity Catalog. For information about Unity Catalog [registered models](/api/workspace/registeredmodels/create), see [_](/machine-learning/manage-model-lifecycle/index.md).
"markdown_examples": |- "markdown_examples": |-
The following example defines a registered model in <UC>: The following example defines a registered model in Unity Catalog:
```yaml ```yaml
resources: resources:
@ -308,12 +308,12 @@ github.com/databricks/cli/bundle/config/resources.RegisteredModel:
github.com/databricks/cli/bundle/config/resources.Schema: github.com/databricks/cli/bundle/config/resources.Schema:
"_": "_":
"markdown_description": |- "markdown_description": |-
The schema resource type allows you to define <UC> [schemas](/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations: The schema resource type allows you to define Unity Catalog [schemas](/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:
- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema. - The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.
- Only fields supported by the corresponding [Schemas object create API](/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](/api/workspace/schemas/update). - Only fields supported by the corresponding [Schemas object create API](/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](/api/workspace/schemas/update).
"markdown_examples": |- "markdown_examples": |-
The following example defines a pipeline with the resource key `my_pipeline` that creates a <UC> schema with the key `my_schema` as the target: The following example defines a pipeline with the resource key `my_pipeline` that creates a Unity Catalog schema with the key `my_schema` as the target:
```yaml ```yaml
resources: resources:
@ -334,9 +334,9 @@ github.com/databricks/cli/bundle/config/resources.Schema:
comment: This schema was created by DABs. comment: This schema was created by DABs.
``` ```
A top-level grants mapping is not supported by <DABS>, so if you want to set grants for a schema, define the grants for the schema within the `schemas` mapping. For more information about grants, see [_](/data-governance/unity-catalog/manage-privileges/index.md#grant). A top-level grants mapping is not supported by Databricks Asset Bundles, so if you want to set grants for a schema, define the grants for the schema within the `schemas` mapping. For more information about grants, see [_](/data-governance/unity-catalog/manage-privileges/index.md#grant).
The following example defines a <UC> schema with grants: The following example defines a Unity Catalog schema with grants:
```yaml ```yaml
resources: resources:
@ -361,13 +361,13 @@ github.com/databricks/cli/bundle/config/resources.Schema:
github.com/databricks/cli/bundle/config/resources.Volume: github.com/databricks/cli/bundle/config/resources.Volume:
"_": "_":
"markdown_description": |- "markdown_description": |-
The volume resource type allows you to define and create <UC> [volumes](/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that: The volume resource type allows you to define and create Unity Catalog [volumes](/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:
- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use <DABS> to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments. - A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use Databricks Asset Bundles to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.
- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [_](/dev-tools/bundles/deployment-modes.md#custom-presets). - Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [_](/dev-tools/bundles/deployment-modes.md#custom-presets).
"markdown_examples": |- "markdown_examples": |-
The following example creates a <UC> volume with the key `my_volume`: The following example creates a Unity Catalog volume with the key `my_volume`:
```yaml ```yaml
resources: resources:
@ -378,7 +378,7 @@ github.com/databricks/cli/bundle/config/resources.Volume:
schema_name: my_schema schema_name: my_schema
``` ```
For an example bundle that runs a job that writes to a file in <UC> volume, see the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples/tree/main/knowledge_base/write_from_job_to_volume). For an example bundle that runs a job that writes to a file in Unity Catalog volume, see the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples/tree/main/knowledge_base/write_from_job_to_volume).
"grants": "grants":
"description": |- "description": |-
PLACEHOLDER PLACEHOLDER
@ -579,3 +579,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
"model_version": "model_version":
"description": |- "description": |-
PLACEHOLDER PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo:
"abfss":
"description": |-
Contains the Azure Data Lake Storage destination path
github.com/databricks/databricks-sdk-go/service/compute.Environment:
"dependencies":
"description": |-
List of pip dependencies, as supported by the version of pip in this environment.
github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog:
"granularities":
"description": |-
Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.
github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries:
"granularities":
"description": |-
Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
"log_analytics_primary_key":
"description": |-
The primary key for the Azure Log Analytics agent configuration
"log_analytics_workspace_id":
"description": |-
The workspace ID for the Azure Log Analytics agent configuration

View File

@ -459,7 +459,7 @@
} }
}, },
"additionalProperties": false, "additionalProperties": false,
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a \u003cDABS\u003e template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)." "markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
}, },
{ {
"type": "string", "type": "string",
@ -552,7 +552,7 @@
} }
}, },
"additionalProperties": false, "additionalProperties": false,
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use \u003cUC\u003e [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead." "markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
}, },
{ {
"type": "string", "type": "string",
@ -743,7 +743,7 @@
} }
}, },
"additionalProperties": false, "additionalProperties": false,
"markdownDescription": "The pipeline resource allows you to create \u003cDLT\u003e [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/delta-live-tables/index.html). For a tutorial that uses the \u003cDABS\u003e template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)." "markdownDescription": "The pipeline resource allows you to create Delta Live Tables [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/dlt/index.html). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
}, },
{ {
"type": "string", "type": "string",
@ -818,7 +818,7 @@
"assets_dir", "assets_dir",
"output_schema_name" "output_schema_name"
], ],
"markdownDescription": "The quality_monitor resource allows you to define a \u003cUC\u003e [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)." "markdownDescription": "The quality_monitor resource allows you to define a Unity Catalog [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
}, },
{ {
"type": "string", "type": "string",
@ -861,7 +861,7 @@
"name", "name",
"schema_name" "schema_name"
], ],
"markdownDescription": "The registered model resource allows you to define models in \u003cUC\u003e. For information about \u003cUC\u003e [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)." "markdownDescription": "The registered model resource allows you to define models in Unity Catalog. For information about Unity Catalog [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
}, },
{ {
"type": "string", "type": "string",
@ -902,7 +902,7 @@
"catalog_name", "catalog_name",
"name" "name"
], ],
"markdownDescription": "The schema resource type allows you to define \u003cUC\u003e [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)." "markdownDescription": "The schema resource type allows you to define Unity Catalog [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
}, },
{ {
"type": "string", "type": "string",
@ -948,7 +948,7 @@
"name", "name",
"schema_name" "schema_name"
], ],
"markdownDescription": "The volume resource type allows you to define and create \u003cUC\u003e [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use \u003cDABS\u003e to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)." "markdownDescription": "The volume resource type allows you to define and create Unity Catalog [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use Databricks Asset Bundles to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
}, },
{ {
"type": "string", "type": "string",
@ -1440,9 +1440,9 @@
"markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors)." "markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors)."
}, },
"registered_models": { "registered_models": {
"description": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model.", "description": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel",
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)." "markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)"
}, },
"schemas": { "schemas": {
"description": "The schema definitions for the bundle, where each key is the name of the schema.", "description": "The schema definitions for the bundle, where each key is the name of the schema.",
@ -2126,7 +2126,7 @@
"type": "object", "type": "object",
"properties": { "properties": {
"granularities": { "granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n", "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.",
"$ref": "#/$defs/slice/string" "$ref": "#/$defs/slice/string"
}, },
"label_col": { "label_col": {
@ -2283,7 +2283,7 @@
"type": "object", "type": "object",
"properties": { "properties": {
"granularities": { "granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n", "description": "Granularities for aggregating data into time windows based on their timestamp. Valid values are 5 minutes, 30 minutes, 1 hour, 1 day, n weeks, 1 month, or 1 year.",
"$ref": "#/$defs/slice/string" "$ref": "#/$defs/slice/string"
}, },
"timestamp_col": { "timestamp_col": {
@ -2768,7 +2768,7 @@
"$ref": "#/$defs/string" "$ref": "#/$defs/string"
}, },
"dependencies": { "dependencies": {
"description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e\nE.g. dependencies: [\"foo==0.0.1\", \"-r /Workspace/test/requirements.txt\"]", "description": "List of pip dependencies, as supported by the version of pip in this environment.",
"$ref": "#/$defs/slice/string" "$ref": "#/$defs/slice/string"
} }
}, },
@ -2864,7 +2864,7 @@
"type": "object", "type": "object",
"properties": { "properties": {
"abfss": { "abfss": {
"description": "destination needs to be provided. e.g.\n`{ \"abfss\" : { \"destination\" : \"abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e\" } }", "description": "Contains the Azure Data Lake Storage destination path",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info" "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info"
}, },
"dbfs": { "dbfs": {
@ -2972,11 +2972,11 @@
"type": "object", "type": "object",
"properties": { "properties": {
"log_analytics_primary_key": { "log_analytics_primary_key": {
"description": "\u003cneeds content added\u003e", "description": "The primary key for the Azure Log Analytics agent configuration",
"$ref": "#/$defs/string" "$ref": "#/$defs/string"
}, },
"log_analytics_workspace_id": { "log_analytics_workspace_id": {
"description": "\u003cneeds content added\u003e", "description": "The workspace ID for the Azure Log Analytics agent configuration",
"$ref": "#/$defs/string" "$ref": "#/$defs/string"
} }
}, },
@ -7440,12 +7440,12 @@
"resources": { "resources": {
"description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.", "description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about \u003cDABS\u003e supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```" "markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
}, },
"run_as": { "run_as": {
"description": "The identity to use when running \u003cDABS\u003e workflows.", "description": "The identity to use when running Databricks Asset Bundles workflows.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
"markdownDescription": "The identity to use when running \u003cDABS\u003e workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)." "markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
}, },
"sync": { "sync": {
"description": "The files and file paths to include or exclude in the bundle.", "description": "The files and file paths to include or exclude in the bundle.",