Update docgen to use with docusaurus

This commit is contained in:
Ilya Kuznetsov 2025-01-28 11:16:38 +01:00
parent ee693532b8
commit dd9af21fe6
No known key found for this signature in database
GPG Key ID: 91F3DDCF5D21CDDF
8 changed files with 5552 additions and 5239 deletions

View File

@ -12,10 +12,11 @@ func buildMarkdown(nodes []rootNode, outputFile, header string) error {
m = m.PlainText(header)
for _, node := range nodes {
m = m.LF()
title := escapeBrackets(node.Title)
if node.TopLevel {
m = m.H2(node.Title)
m = m.H2(title)
} else {
m = m.H3(node.Title)
m = m.H3(title)
}
m = m.LF()
@ -67,21 +68,24 @@ func pickLastWord(s string) string {
// Build a custom table which we use in Databricks website
func buildAttributeTable(m *markdownRenderer, attributes []attributeNode) *markdownRenderer {
m = m.LF()
m = m.PlainText(".. list-table::")
m = m.PlainText(" :header-rows: 1")
m = m.PlainText(":::list-table")
m = m.LF()
m = m.PlainText(" * - Key")
m = m.PlainText(" - Type")
m = m.PlainText(" - Description")
m = m.PlainText("- - Key")
m = m.PlainText(" - Type")
m = m.PlainText(" - Description")
m = m.LF()
for _, a := range attributes {
m = m.PlainText(" * - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a))
m = m.PlainText("- - " + fmt.Sprintf("`%s`", a.Title))
m = m.PlainText(" - " + a.Type)
m = m.PlainText(" - " + formatDescription(a))
m = m.LF()
}
m = m.PlainText(":::")
m = m.LF()
return m
}
@ -93,7 +97,21 @@ func formatDescription(a attributeNode) string {
} else if s != "" {
s += ". "
}
s += fmt.Sprintf("See [_](#%s).", a.Link)
s += fmt.Sprintf("See [_](#%s).", cleanAnchor(a.Link))
}
return s
}
// cleanAnchor strips characters ('<', '>', '.') that must not appear in
// a Docusaurus anchor ID before it is embedded in a markdown link.
func cleanAnchor(s string) string {
	return strings.Map(func(r rune) rune {
		switch r {
		case '<', '>', '.':
			return -1 // drop the rune
		}
		return r
	}, s)
}
// escapeBrackets backslash-escapes angle brackets so that MDX/Docusaurus
// does not interpret them as the start of a JSX/HTML tag.
func escapeBrackets(s string) string {
	replacer := strings.NewReplacer("<", "\\<", ">", "\\>")
	return replacer.Replace(s)
}

View File

@ -65,7 +65,7 @@ func buildNodes(s jsonschema.Schema, refs map[string]*jsonschema.Schema, ownFiel
v = resolveRefs(v, refs)
node := rootNode{
Title: k,
Description: getDescription(v, item.topLevel),
Description: getDescription(v),
TopLevel: item.topLevel,
Example: getExample(v),
Type: getHumanReadableType(v.Type),
@ -78,7 +78,7 @@ func buildNodes(s jsonschema.Schema, refs map[string]*jsonschema.Schema, ownFiel
mapValueType := getMapValueType(v, refs)
if mapValueType != nil {
d := getDescription(mapValueType, true)
d := getDescription(mapValueType)
if d != "" {
node.Description = d
}
@ -174,7 +174,7 @@ func getAttributes(props, refs map[string]*jsonschema.Schema, ownFields map[stri
attributes = append(attributes, attributeNode{
Title: k,
Type: typeString,
Description: getDescription(v, true),
Description: getDescription(v),
Link: reference,
})
}
@ -184,8 +184,8 @@ func getAttributes(props, refs map[string]*jsonschema.Schema, ownFields map[stri
return attributes
}
func getDescription(s *jsonschema.Schema, allowMarkdown bool) string {
if allowMarkdown && s.MarkdownDescription != "" {
func getDescription(s *jsonschema.Schema) string {
if s.MarkdownDescription != "" {
return s.MarkdownDescription
}
return s.Description

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,8 @@
---
description: 'Configuration reference for databricks.yml'
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Configuration reference for databricks.yml
---
# Configuration reference

View File

@ -1,70 +1,75 @@
---
description: 'Learn about resources supported by Databricks Asset Bundles and how to configure them.'
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Learn about resources supported by Databricks Asset Bundles and how to configure them.
---
# Databricks Asset Bundles resources
# :re[DABS] resources
Databricks Asset Bundles allows you to specify information about the <Databricks> resources used by the bundle in the `resources` mapping in the bundle configuration. See [resources mapping](/dev-tools/bundles/settings.md#resources) and [resources key reference](/dev-tools/bundles/reference.md#resources).
:re[DABS] allows you to specify information about the :re[Databricks] resources used by the bundle in the `resources` mapping in the bundle configuration. See [resources mapping](settings.md#resources) and [resources key reference](reference.md#resources).
This article outlines supported resource types for bundles and provides details and an example for each supported type. For additional examples, see [_](/dev-tools/bundles/resource-examples.md).
This article outlines supported resource types for bundles and provides details and an example for each supported type. For additional examples, see [\_](resource-examples.md).
## <a id="resource-types"></a> Supported resources
## <a id="resource-types"></a>Supported resources
The following table lists supported resource types for bundles. Some resources can be created by defining them in a bundle and deploying the bundle, and some resources only support referencing an existing resource to include in the bundle.
Resources are defined using the corresponding [Databricks REST API](/api/workspace/introduction) object's create operation request payload, where the object's supported fields, expressed as YAML, are the resource's supported properties. Links to documentation for each resource's corresponding payloads are listed in the table.
Resources are defined using the corresponding [Databricks REST API](https://docs.databricks.com/api/workspace/introduction) object's create operation request payload, where the object's supported fields, expressed as YAML, are the resource's supported properties. Links to documentation for each resource's corresponding payloads are listed in the table.
.. tip:: The `databricks bundle validate` command returns warnings if unknown resource properties are found in bundle configuration files.
:::tip
The `databricks bundle validate` command returns warnings if unknown resource properties are found in bundle configuration files.
.. list-table::
:header-rows: 1
:::
* - Resource
- Create support
- Corresponding REST API object
:::list-table
* - [cluster](#cluster)
- ✓
- [Cluster object](/api/workspace/clusters/create)
- - Resource
- Create support
- Corresponding REST API object
* - [dashboard](#dashboard)
-
- [Dashboard object](/api/workspace/lakeview/create)
- - [cluster](#clusters)
- ✓
- [Cluster object](https://docs.databricks.com/api/workspace/clusters/create)
* - [experiment](#experiment)
-
- [Experiment object](/api/workspace/experiments/createexperiment)
- - [dashboard](#dashboards)
-
- [Dashboard object](https://docs.databricks.com/api/workspace/lakeview/create)
* - [job](#job)
- ✓
- [Job object](/api/workspace/jobs/create)
- - [experiment](#experiments)
- ✓
- [Experiment object](https://docs.databricks.com/api/workspace/experiments/createexperiment)
* - [model (legacy)](#model-legacy)
- ✓
- [Model (legacy) object](/api/workspace/modelregistry/createmodel)
- - [job](#jobs)
- ✓
- [Job object](https://docs.databricks.com/api/workspace/jobs/create)
* - [model_serving_endpoint](#model-serving-endpoint)
- ✓
- [Model serving endpoint object](/api/workspace/servingendpoints/create)
- - [model (legacy)](#models)
- ✓
- [Model (legacy) object](https://docs.databricks.com/api/workspace/modelregistry/createmodel)
* - [pipeline](#pipeline)
- ✓
- [Pipeline object](/api/workspace/pipelines/create)
- - [model_serving_endpoint](#model_serving_endpoints)
- ✓
- [Model serving endpoint object](https://docs.databricks.com/api/workspace/servingendpoints/create)
* - [quality_monitor](#quality-monitor)
- ✓
- [Quality monitor object](/api/workspace/qualitymonitors/create)
- - [pipeline](#pipelines)
- ✓
- [Pipeline object](https://docs.databricks.com/api/workspace/pipelines/create)
* - [registered_model](#registered-model) (<UC>)
- ✓
- [Registered model object](/api/workspace/registeredmodels/create)
- - [quality_monitor](#quality_monitors)
- ✓
- [Quality monitor object](https://docs.databricks.com/api/workspace/qualitymonitors/create)
* - [schema](#schema) (<UC>)
- ✓
- [Schema object](/api/workspace/schemas/create)
- - [registered_model](#registered_models) (Unity Catalog)
- ✓
- [Registered model object](https://docs.databricks.com/api/workspace/registeredmodels/create)
* - [volume](#volume) (<UC>)
- ✓
- [Volume object](/api/workspace/volumes/create)
- - [schema](#schemas) (Unity Catalog)
- ✓
- [Schema object](https://docs.databricks.com/api/workspace/schemas/create)
- - [volume](#volumes) (Unity Catalog)
- ✓
- [Volume object](https://docs.databricks.com/api/workspace/volumes/create)
:::

View File

@ -220,7 +220,7 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
github.com/databricks/cli/bundle/config/resources.Pipeline:
"_":
"markdown_description": |-
The pipeline resource allows you to create <DLT> [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/delta-live-tables/index.md). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
The pipeline resource allows you to create Delta Live Tables [pipelines](/api/workspace/pipelines/create). For information about pipelines, see [_](/delta-live-tables/index.md). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [_](/dev-tools/bundles/pipelines-tutorial.md).
"markdown_examples": |-
The following example defines a pipeline with the resource key `hello-pipeline`:
@ -563,3 +563,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
"model_version":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo:
"abfss":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.Environment:
"dependencies":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog:
"granularities":
"description": |
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries:
"granularities":
"description": |
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
"log_analytics_primary_key":
"description": |-
PLACEHOLDER
"log_analytics_workspace_id":
"description": |-
PLACEHOLDER

View File

@ -445,7 +445,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a \u003cDABS\u003e template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
"markdownDescription": "The job resource allows you to define [jobs and their corresponding tasks](https://docs.databricks.com/api/workspace/jobs/create) in your bundle. For information about jobs, see [link](https://docs.databricks.com/jobs/index.html). For a tutorial that uses a Databricks Asset Bundles template to create a job, see [link](https://docs.databricks.com/dev-tools/bundles/jobs-tutorial.html)."
},
{
"type": "string",
@ -537,7 +537,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use \u003cUC\u003e [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
"markdownDescription": "The model resource allows you to define [legacy models](https://docs.databricks.com/api/workspace/modelregistry/createmodel) in bundles. Databricks recommends you use Unity Catalog [registered models](https://docs.databricks.com/dev-tools/bundles/reference.html#registered-model) instead."
},
{
"type": "string",
@ -725,7 +725,7 @@
}
},
"additionalProperties": false,
"markdownDescription": "The pipeline resource allows you to create \u003cDLT\u003e [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/delta-live-tables/index.html). For a tutorial that uses the \u003cDABS\u003e template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
"markdownDescription": "The pipeline resource allows you to create Delta Live Tables [pipelines](https://docs.databricks.com/api/workspace/pipelines/create). For information about pipelines, see [link](https://docs.databricks.com/delta-live-tables/index.html). For a tutorial that uses the Databricks Asset Bundles template to create a pipeline, see [link](https://docs.databricks.com/dev-tools/bundles/pipelines-tutorial.html)."
},
{
"type": "string",
@ -800,7 +800,7 @@
"assets_dir",
"output_schema_name"
],
"markdownDescription": "The quality_monitor resource allows you to define a \u003cUC\u003e [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
"markdownDescription": "The quality_monitor resource allows you to define a Unity Catalog [table monitor](https://docs.databricks.com/api/workspace/qualitymonitors/create). For information about monitors, see [link](https://docs.databricks.com/machine-learning/model-serving/monitor-diagnose-endpoints.html)."
},
{
"type": "string",
@ -843,7 +843,7 @@
"name",
"schema_name"
],
"markdownDescription": "The registered model resource allows you to define models in \u003cUC\u003e. For information about \u003cUC\u003e [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
"markdownDescription": "The registered model resource allows you to define models in Unity Catalog. For information about Unity Catalog [registered models](https://docs.databricks.com/api/workspace/registeredmodels/create), see [link](https://docs.databricks.com/machine-learning/manage-model-lifecycle/index.html)."
},
{
"type": "string",
@ -884,7 +884,7 @@
"catalog_name",
"name"
],
"markdownDescription": "The schema resource type allows you to define \u003cUC\u003e [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
"markdownDescription": "The schema resource type allows you to define Unity Catalog [schemas](https://docs.databricks.com/api/workspace/schemas/create) for tables and other assets in your workflows and pipelines created as part of a bundle. A schema, different from other resource types, has the following limitations:\n\n- The owner of a schema resource is always the deployment user, and cannot be changed. If `run_as` is specified in the bundle, it will be ignored by operations on the schema.\n- Only fields supported by the corresponding [Schemas object create API](https://docs.databricks.com/api/workspace/schemas/create) are available for the schema resource. For example, `enable_predictive_optimization` is not supported as it is only available on the [update API](https://docs.databricks.com/api/workspace/schemas/update)."
},
{
"type": "string",
@ -930,7 +930,7 @@
"name",
"schema_name"
],
"markdownDescription": "The volume resource type allows you to define and create \u003cUC\u003e [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use \u003cDABS\u003e to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
"markdownDescription": "The volume resource type allows you to define and create Unity Catalog [volumes](https://docs.databricks.com/api/workspace/volumes/create) as part of a bundle. When deploying a bundle with a volume defined, note that:\n\n- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use Databricks Asset Bundles to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.\n\n- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [custom-presets](https://docs.databricks.com/dev-tools/bundles/deployment-modes.html#custom-presets)."
},
{
"type": "string",
@ -1403,9 +1403,9 @@
"markdownDescription": "The quality monitor definitions for the bundle, where each key is the name of the quality monitor. See [quality_monitors](https://docs.databricks.com/dev-tools/bundles/resources.html#quality_monitors)"
},
"registered_models": {
"description": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model.",
"description": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel",
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the \u003cUC\u003e registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)"
"markdownDescription": "The registered model definitions for the bundle, where each key is the name of the Unity Catalog registered model. See [registered_models](https://docs.databricks.com/dev-tools/bundles/resources.html#registered_models)"
},
"schemas": {
"description": "The schema definitions for the bundle, where each key is the name of the schema.",
@ -2087,7 +2087,7 @@
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"description": "PLACEHOLDER\n",
"$ref": "#/$defs/slice/string"
},
"label_col": {
@ -2244,7 +2244,7 @@
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"description": "PLACEHOLDER\n",
"$ref": "#/$defs/slice/string"
},
"timestamp_col": {
@ -2725,7 +2725,6 @@
"$ref": "#/$defs/string"
},
"dependencies": {
"description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e\nE.g. dependencies: [\"foo==0.0.1\", \"-r /Workspace/test/requirements.txt\"]",
"$ref": "#/$defs/slice/string"
}
},
@ -2821,7 +2820,6 @@
"type": "object",
"properties": {
"abfss": {
"description": "destination needs to be provided. e.g.\n`{ \"abfss\" : { \"destination\" : \"abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e\" } }",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info"
},
"dbfs": {
@ -2929,11 +2927,9 @@
"type": "object",
"properties": {
"log_analytics_primary_key": {
"description": "\u003cneeds content added\u003e",
"$ref": "#/$defs/string"
},
"log_analytics_workspace_id": {
"description": "\u003cneeds content added\u003e",
"$ref": "#/$defs/string"
}
},
@ -7265,12 +7261,12 @@
"resources": {
"description": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources",
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about \u003cDABS\u003e supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
"markdownDescription": "A Map that defines the resources for the bundle, where each key is the name of the resource, and the value is a Map that defines the resource. For more information about Databricks Asset Bundles supported resources, and resource definition reference, see [link](https://docs.databricks.com/dev-tools/bundles/resources.html).\n\n```yaml\nresources:\n \u003cresource-type\u003e:\n \u003cresource-name\u003e:\n \u003cresource-field-name\u003e: \u003cresource-field-value\u003e\n```"
},
"run_as": {
"description": "The identity to use when running \u003cDABS\u003e workflows.",
"description": "The identity to use when running Databricks Asset Bundles workflows.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs",
"markdownDescription": "The identity to use when running \u003cDABS\u003e workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
"markdownDescription": "The identity to use when running Databricks Asset Bundles workflows. See [link](https://docs.databricks.com/dev-tools/bundles/run-as.html)."
},
"sync": {
"description": "The files and file paths to include or exclude in the bundle.",