Compare commits

No commits in common. "5194889605c9d24b03a2e49317e1a1f266b3eb0d" and "8f59695d0d2ddbfab7b29a9b86061e280d5c802b" have entirely different histories.

7 changed files with 238 additions and 255 deletions

@@ -170,29 +170,32 @@ func convertLinksToAbsoluteUrl(s string) string {
 	base := "https://docs.databricks.com"
 	referencePage := "/dev-tools/bundles/reference.html"
-	// Regular expression to match Markdown-style links like [_](link)
+	// Regular expression to match Markdown-style links
 	re := regexp.MustCompile(`\[_\]\(([^)]+)\)`)
 	result := re.ReplaceAllStringFunc(s, func(match string) string {
 		// Extract the URL inside parentheses
 		matches := re.FindStringSubmatch(match)
 		if len(matches) < 2 {
-			return match
+			return match // Return original if no match found
 		}
 		link := matches[1]
 		var text, absoluteURL string
 		if strings.HasPrefix(link, "#") {
 			text = strings.TrimPrefix(link, "#")
 			absoluteURL = fmt.Sprintf("%s%s%s", base, referencePage, link)
-		// Handle relative paths like /dev-tools/bundles/resources.html#dashboard
 		} else if strings.HasPrefix(link, "/") {
-			absoluteURL = strings.ReplaceAll(fmt.Sprintf("%s%s", base, link), ".md", ".html")
+			// Handle relative paths like /dev-tools/bundles/resources.html#dashboard
+			if strings.Contains(link, "#") {
+				parts := strings.Split(link, "#")
+				text = parts[1]
+				absoluteURL = fmt.Sprintf("%s%s", base, link)
+			} else {
+				text = "link"
+				absoluteURL = fmt.Sprintf("%s%s", base, link)
+			}
+			absoluteURL = strings.ReplaceAll(absoluteURL, ".md", ".html")
 		} else {
 			return match
 		}
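
A minimal sketch of the relative-path branch in the hunk above, as a standalone program (the regex and base URL come straight from the diff; the real function also rebuilds the [text](url) form, which this sketch skips for brevity):

	package main

	import (
		"fmt"
		"regexp"
		"strings"
	)

	// Matches Markdown-style links of the form [_](link), per the diff above.
	var linkRe = regexp.MustCompile(`\[_\]\(([^)]+)\)`)

	func rewrite(s string) string {
		base := "https://docs.databricks.com"
		return linkRe.ReplaceAllStringFunc(s, func(match string) string {
			link := linkRe.FindStringSubmatch(match)[1]
			if !strings.HasPrefix(link, "/") {
				return match // anchor-only and external links are handled elsewhere
			}
			// Convert the doc source path to its published .html URL; a
			// fragment such as #dashboard survives the replacement untouched.
			return base + strings.ReplaceAll(link, ".md", ".html")
		})
	}

	func main() {
		fmt.Println(rewrite("See [_](/dev-tools/bundles/resources.md#dashboard)."))
		// Output: See https://docs.databricks.com/dev-tools/bundles/resources.html#dashboard.
	}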

@@ -19,9 +19,7 @@ github.com/databricks/cli/bundle/config.Artifact:
       The location where the built artifact will be saved.
   "type":
     "description": |-
-      The type of the artifact.
-    "markdown_description": |-
-      The type of the artifact. Valid values are `wheel` or `jar`
+      The type of the artifact. Valid values are wheel or jar.
 github.com/databricks/cli/bundle/config.ArtifactFile:
   "source":
     "description": |-
@@ -193,9 +191,7 @@ github.com/databricks/cli/bundle/config.Root:
       Defines attributes for experimental features.
   "include":
     "description": |-
-      Specifies a list of path globs that contain configuration files to include within the bundle.
-    "markdown_description": |-
-      Specifies a list of path globs that contain configuration files to include within the bundle. See [_](/dev-tools/bundles/settings.md#include)
+      PLACEHOLDER
   "permissions":
     "description": |-
       Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle
@@ -208,9 +204,9 @@ github.com/databricks/cli/bundle/config.Root:
       Defines bundle deployment presets. See [_](/dev-tools/bundles/deployment-modes.md#presets).
   "resources":
     "description": |-
-      Specifies information about the Databricks resources used by the bundle
+      PLACEHOLDER
     "markdown_description": |-
-      Specifies information about the Databricks resources used by the bundle. See [_](/dev-tools/bundles/resources.md).
+      See [_](/dev-tools/bundles/resources.md).
   "run_as":
     "description": |-
       The identity to use to run the bundle.
@@ -263,9 +259,9 @@ github.com/databricks/cli/bundle/config.Target:
       The Git version control settings for the target. See [_](#git).
   "mode":
     "description": |-
-      The deployment mode for the target.
+      The deployment mode for the target. Valid values are development or production.
     "markdown_description": |-
-      The deployment mode for the target. Valid values are `development` or `production`. See [_](/dev-tools/bundles/deployment-modes.md).
+      The deployment mode for the target. Valid values are development or production. See [_](/dev-tools/bundles/deployment-modes.md).
   "permissions":
     "description": |-
       The permissions for deploying and running the bundle in the target.
@@ -298,7 +294,7 @@ github.com/databricks/cli/bundle/config.Target:
       The custom variable definitions for the target. See [_](/dev-tools/bundles/settings.md#variables) and [_](/dev-tools/bundles/variables.md).
   "workspace":
     "description": |-
-      The Databricks workspace for the target.
+      The Databricks workspace for the target. _.
     "markdown_description": |-
       The Databricks workspace for the target. [_](#workspace)
 github.com/databricks/cli/bundle/config.Workspace:
@@ -413,7 +409,7 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable:
       PLACEHOLDER
   "description":
     "description": |-
-      The description of the variable.
+      The description of the variable
   "lookup":
     "description": |-
       The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
@@ -422,7 +418,7 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable:
       The type of the variable.
   "markdown_description":
     "description": |-
-      The type of the variable.
+      The type of the variable. Valid values are `complex`.
 github.com/databricks/cli/bundle/config/variable.Variable:
   "default":
     "description": |-
@@ -433,11 +429,9 @@ github.com/databricks/cli/bundle/config/variable.Variable:
   "lookup":
     "description": |-
       The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
-    "markdown_description": |-
-      The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID."
   "type":
     "description": |-
-      The type of the variable.
+      The type of the variable. Valid values are complex.
 github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
   "ai21labs_api_key":
     "description": |-

@@ -44,8 +44,7 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.
 	case jsonschema.ArrayType, jsonschema.ObjectType:
 		// arrays and objects can have complex variable values specified.
 		return jsonschema.Schema{
-			// OneOf is used because we don't expect more than 1 match and schema-based auto-complete works better with OneOf
-			OneOf: []jsonschema.Schema{
+			AnyOf: []jsonschema.Schema{
 				s,
 				{
 					Type: jsonschema.StringType,
@@ -56,7 +55,7 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.
 	// primitives can have variable values, or references like ${bundle.xyz}
 	// or ${workspace.xyz}
 	return jsonschema.Schema{
-		OneOf: []jsonschema.Schema{
+		AnyOf: []jsonschema.Schema{
 			s,
 			{Type: jsonschema.StringType, Pattern: interpolationPattern("resources")},
 			{Type: jsonschema.StringType, Pattern: interpolationPattern("bundle")},
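
Worth noting on the OneOf → AnyOf swap above: in JSON Schema, anyOf passes when at least one subschema matches, while oneOf fails when more than one does. The generated branches can overlap (an unconstrained string-typed field already accepts a value like ${var.myvar}, and so does the interpolation-pattern branch), so oneOf can reject valid configurations. A hand-rolled illustration of the distinction, with predicates standing in for subschemas (names and string checks are purely illustrative, not the CLI's actual validation):

	package main

	import "fmt"

	func main() {
		// Two overlapping "subschemas": any string, and strings that look
		// like an interpolation reference such as ${var.myvar}.
		isString := func(v any) bool { _, ok := v.(string); return ok }
		isInterp := func(v any) bool {
			s, ok := v.(string)
			return ok && len(s) > 3 && s[:2] == "${" && s[len(s)-1] == '}'
		}

		v := "${var.myvar}" // matches both branches
		matches := 0
		for _, p := range []func(any) bool{isString, isInterp} {
			if p(v) {
				matches++
			}
		}
		fmt.Println("anyOf passes:", matches >= 1) // true
		fmt.Println("oneOf passes:", matches == 1) // false: overlap trips oneOf
	}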

@@ -1,5 +0,0 @@
-targets:
-  production:
-    variables:
-      myvar:
-        default: true

@@ -41,32 +41,32 @@ func TestJsonSchema(t *testing.T) {
 	resourceJob := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Job")
 	fields := []string{"name", "continuous", "tasks", "trigger"}
 	for _, field := range fields {
-		assert.NotEmpty(t, resourceJob.OneOf[0].Properties[field].Description)
+		assert.NotEmpty(t, resourceJob.AnyOf[0].Properties[field].Description)
 	}

 	// Assert descriptions were also loaded for a job task definition.
 	jobTask := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.Task")
 	fields = []string{"notebook_task", "spark_jar_task", "spark_python_task", "spark_submit_task", "description", "depends_on", "environment_key", "for_each_task", "existing_cluster_id"}
 	for _, field := range fields {
-		assert.NotEmpty(t, jobTask.OneOf[0].Properties[field].Description)
+		assert.NotEmpty(t, jobTask.AnyOf[0].Properties[field].Description)
 	}

 	// Assert descriptions are loaded for pipelines
 	pipeline := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Pipeline")
 	fields = []string{"name", "catalog", "clusters", "channel", "continuous", "development"}
 	for _, field := range fields {
-		assert.NotEmpty(t, pipeline.OneOf[0].Properties[field].Description)
+		assert.NotEmpty(t, pipeline.AnyOf[0].Properties[field].Description)
 	}

 	// Assert enum values are loaded
 	schedule := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "pipelines.RestartWindow")
-	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "MONDAY")
-	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "TUESDAY")
-	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "WEDNESDAY")
-	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "THURSDAY")
-	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "FRIDAY")
-	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "SATURDAY")
-	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "SUNDAY")
+	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "MONDAY")
+	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "TUESDAY")
+	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "WEDNESDAY")
+	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "THURSDAY")
+	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "FRIDAY")
+	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "SATURDAY")
+	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "SUNDAY")

 	providers := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.GitProvider")
 	assert.Contains(t, providers.Enum, "gitHub")

File diff suppressed because it is too large.

@@ -70,9 +70,6 @@ type Schema struct {
 	// Schema that must match any of the schemas in the array
 	AnyOf []Schema `json:"anyOf,omitempty"`

-	// Schema that must match one of the schemas in the array
-	OneOf []Schema `json:"oneOf,omitempty"`
-
 	// Title of the object, rendered as inline documentation in the IDE.
 	// https://json-schema.org/understanding-json-schema/reference/annotations
 	Title string `json:"title,omitempty"`
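
With the OneOf field removed, every composite schema now serializes through the anyOf keyword via the field's json tag. A quick sketch with a pared-down stand-in for the Schema type above (only the fields visible in this diff; the pattern is illustrative, not the CLI's actual interpolation regex):

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// Pared-down stand-in for the Schema type in the hunk above.
	type Schema struct {
		Type    string   `json:"type,omitempty"`
		Pattern string   `json:"pattern,omitempty"`
		AnyOf   []Schema `json:"anyOf,omitempty"`
		Title   string   `json:"title,omitempty"`
	}

	func main() {
		// Roughly the shape addInterpolationPatterns produces for a
		// primitive field: the original schema plus a pattern branch.
		s := Schema{AnyOf: []Schema{
			{Type: "boolean"},
			{Type: "string", Pattern: `\$\{var\.[a-zA-Z_][a-zA-Z0-9_.]*\}`},
		}}
		out, _ := json.MarshalIndent(s, "", "  ")
		fmt.Println(string(out)) // emits {"anyOf": [ ... ]}
	}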