Compare commits


5 Commits

Author          SHA1        Message                                     Date
Ilya Kuznetsov  5194889605  fix: Cleanup                                2024-12-12 13:05:55 +01:00
Ilya Kuznetsov  aed0e0a92f  fix: Missing annotations                    2024-12-12 13:01:49 +01:00
Ilya Kuznetsov  43c4b581d4  feat: Use OneOf in interpolation patterns   2024-12-12 12:44:36 +01:00
Ilya Kuznetsov  d3b30f73ec  fix: Schema regenerate                      2024-12-11 15:39:08 +01:00
Ilya Kuznetsov  40505b867a  fix: Annotations cleanup                    2024-12-11 14:31:53 +01:00
7 changed files with 255 additions and 238 deletions

View File

@@ -170,32 +170,29 @@ func convertLinksToAbsoluteUrl(s string) string {
 	base := "https://docs.databricks.com"
 	referencePage := "/dev-tools/bundles/reference.html"
-	// Regular expression to match Markdown-style links
+	// Regular expression to match Markdown-style links like [_](link)
 	re := regexp.MustCompile(`\[_\]\(([^)]+)\)`)
 	result := re.ReplaceAllStringFunc(s, func(match string) string {
 		// Extract the URL inside parentheses
 		matches := re.FindStringSubmatch(match)
 		if len(matches) < 2 {
-			return match // Return original if no match found
+			return match
 		}
 		link := matches[1]
 		var text, absoluteURL string
 		if strings.HasPrefix(link, "#") {
 			text = strings.TrimPrefix(link, "#")
 			absoluteURL = fmt.Sprintf("%s%s%s", base, referencePage, link)
 		} else if strings.HasPrefix(link, "/") {
-			// Handle relative paths like /dev-tools/bundles/resources.html#dashboard
-			absoluteURL = strings.ReplaceAll(fmt.Sprintf("%s%s", base, link), ".md", ".html")
 			if strings.Contains(link, "#") {
 				parts := strings.Split(link, "#")
 				text = parts[1]
+				absoluteURL = fmt.Sprintf("%s%s", base, link)
 			} else {
 				text = "link"
+				absoluteURL = fmt.Sprintf("%s%s", base, link)
 			}
+			absoluteURL = strings.ReplaceAll(absoluteURL, ".md", ".html")
 		} else {
 			return match
 		}
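For reference, a self-contained sketch of the rewritten branch above. The tail of the function is not shown in this hunk, so the final `[text](url)` formatting and the handling of `result` are assumptions; the sample input and output illustrate why the `.md` to `.html` rewrite now runs on the fully assembled URL, so anchors survive.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Minimal reproduction of the new branch logic; the closing Sprintf is an
// assumed reconstruction of the function tail not shown in the diff.
func convertLinksToAbsoluteUrl(s string) string {
	base := "https://docs.databricks.com"
	referencePage := "/dev-tools/bundles/reference.html"
	re := regexp.MustCompile(`\[_\]\(([^)]+)\)`)
	return re.ReplaceAllStringFunc(s, func(match string) string {
		matches := re.FindStringSubmatch(match)
		if len(matches) < 2 {
			return match
		}
		link := matches[1]
		var text, absoluteURL string
		if strings.HasPrefix(link, "#") {
			text = strings.TrimPrefix(link, "#")
			absoluteURL = fmt.Sprintf("%s%s%s", base, referencePage, link)
		} else if strings.HasPrefix(link, "/") {
			if strings.Contains(link, "#") {
				parts := strings.Split(link, "#")
				text = parts[1]
				absoluteURL = fmt.Sprintf("%s%s", base, link)
			} else {
				text = "link"
				absoluteURL = fmt.Sprintf("%s%s", base, link)
			}
			// The .md -> .html rewrite now runs last, on the assembled URL.
			absoluteURL = strings.ReplaceAll(absoluteURL, ".md", ".html")
		} else {
			return match
		}
		return fmt.Sprintf("[%s](%s)", text, absoluteURL)
	})
}

func main() {
	fmt.Println(convertLinksToAbsoluteUrl("See [_](/dev-tools/bundles/settings.md#include)"))
	// Output: See [include](https://docs.databricks.com/dev-tools/bundles/settings.html#include)
}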

View File

@@ -19,7 +19,9 @@ github.com/databricks/cli/bundle/config.Artifact:
       The location where the built artifact will be saved.
   "type":
     "description": |-
-      The type of the artifact. Valid values are wheel or jar.
+      The type of the artifact.
+    "markdown_description": |-
+      The type of the artifact. Valid values are `wheel` or `jar`
 github.com/databricks/cli/bundle/config.ArtifactFile:
   "source":
     "description": |-
@@ -191,7 +193,9 @@ github.com/databricks/cli/bundle/config.Root:
       Defines attributes for experimental features.
   "include":
     "description": |-
-      PLACEHOLDER
+      Specifies a list of path globs that contain configuration files to include within the bundle.
+    "markdown_description": |-
+      Specifies a list of path globs that contain configuration files to include within the bundle. See [_](/dev-tools/bundles/settings.md#include)
   "permissions":
     "description": |-
       Defines the permissions to apply to experiments, jobs, pipelines, and models defined in the bundle
@@ -204,9 +208,9 @@ github.com/databricks/cli/bundle/config.Root:
       Defines bundle deployment presets. See [_](/dev-tools/bundles/deployment-modes.md#presets).
   "resources":
     "description": |-
-      PLACEHOLDER
+      Specifies information about the Databricks resources used by the bundle
     "markdown_description": |-
-      See [_](/dev-tools/bundles/resources.md).
+      Specifies information about the Databricks resources used by the bundle. See [_](/dev-tools/bundles/resources.md).
   "run_as":
     "description": |-
       The identity to use to run the bundle.
@@ -259,9 +263,9 @@ github.com/databricks/cli/bundle/config.Target:
       The Git version control settings for the target. See [_](#git).
   "mode":
     "description": |-
-      The deployment mode for the target. Valid values are development or production.
+      The deployment mode for the target.
     "markdown_description": |-
-      The deployment mode for the target. Valid values are development or production. See [_](/dev-tools/bundles/deployment-modes.md).
+      The deployment mode for the target. Valid values are `development` or `production`. See [_](/dev-tools/bundles/deployment-modes.md).
   "permissions":
     "description": |-
       The permissions for deploying and running the bundle in the target.
@@ -294,7 +298,7 @@ github.com/databricks/cli/bundle/config.Target:
       The custom variable definitions for the target. See [_](/dev-tools/bundles/settings.md#variables) and [_](/dev-tools/bundles/variables.md).
   "workspace":
     "description": |-
-      The Databricks workspace for the target. _.
+      The Databricks workspace for the target.
     "markdown_description": |-
       The Databricks workspace for the target. [_](#workspace)
 github.com/databricks/cli/bundle/config.Workspace:
@@ -409,7 +413,7 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable:
       PLACEHOLDER
   "description":
     "description": |-
-      The description of the variable
+      The description of the variable.
   "lookup":
     "description": |-
       The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
@@ -418,7 +422,7 @@ github.com/databricks/cli/bundle/config/variable.TargetVariable:
       The type of the variable.
   "markdown_description":
     "description": |-
-      The type of the variable. Valid values are `complex`.
+      The type of the variable.
 github.com/databricks/cli/bundle/config/variable.Variable:
   "default":
     "description": |-
@@ -429,9 +433,11 @@ github.com/databricks/cli/bundle/config/variable.Variable:
   "lookup":
     "description": |-
       The name of the alert, cluster_policy, cluster, dashboard, instance_pool, job, metastore, pipeline, query, service_principal, or warehouse object for which to retrieve an ID.
+    "markdown_description": |-
+      The name of the `alert`, `cluster_policy`, `cluster`, `dashboard`, `instance_pool`, `job`, `metastore`, `pipeline`, `query`, `service_principal`, or `warehouse` object for which to retrieve an ID."
   "type":
     "description": |-
-      The type of the variable. Valid values are complex.
+      The type of the variable.
 github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
   "ai21labs_api_key":
     "description": |-

View File

@@ -44,7 +44,8 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
 	case jsonschema.ArrayType, jsonschema.ObjectType:
 		// arrays and objects can have complex variable values specified.
 		return jsonschema.Schema{
-			AnyOf: []jsonschema.Schema{
+			// OneOf is used because we don't expect more than 1 match and schema-based auto-complete works better with OneOf
+			OneOf: []jsonschema.Schema{
 				s,
 				{
 					Type: jsonschema.StringType,
@@ -55,7 +56,7 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
 		// primitives can have variable values, or references like ${bundle.xyz}
 		// or ${workspace.xyz}
 		return jsonschema.Schema{
-			AnyOf: []jsonschema.Schema{
+			OneOf: []jsonschema.Schema{
 				s,
 				{Type: jsonschema.StringType, Pattern: interpolationPattern("resources")},
 				{Type: jsonschema.StringType, Pattern: interpolationPattern("bundle")},
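To see the effect of this change, here is a sketch (not the repo's actual code) of the shape of schema the generator now emits for a primitive property: the value must match exactly one of the original schema or an interpolation-pattern string. The Schema mirror and the interpolationPattern regex below are simplified assumptions for illustration.

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// Simplified local mirror of the relevant jsonschema.Schema fields; the real
// type lives in the CLI's jsonschema package.
type Schema struct {
	Type    string   `json:"type,omitempty"`
	Pattern string   `json:"pattern,omitempty"`
	OneOf   []Schema `json:"oneOf,omitempty"`
}

// Illustrative assumption: the generated pattern in the repo is stricter.
func interpolationPattern(prefix string) string {
	return fmt.Sprintf(`\$\{(%s(\.[a-zA-Z_][a-zA-Z0-9_]*)+)\}`, prefix)
}

func main() {
	// Roughly what a boolean property's schema looks like after the change:
	// the value must match exactly one of the original schema or an
	// interpolation reference such as ${var.foo}.
	s := Schema{
		OneOf: []Schema{
			{Type: "boolean"},
			{Type: "string", Pattern: interpolationPattern("var")},
		},
	}
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ")
	enc.Encode(s) // emits an indented {"oneOf": [...]} schema fragment
}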

View File

@@ -0,0 +1,5 @@
+targets:
+  production:
+    variables:
+      myvar:
+        default: true
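This new fixture gives a variable a boolean default, which only validates cleanly if the generated schema accepts either the variable's own type or an interpolation string. A hand-rolled illustration of the oneOf rule the fixture exercises; exactly one branch may match, whereas anyOf would accept one or more. The interpolation regex is a simplified stand-in, not the generated pattern.

package main

import (
	"fmt"
	"regexp"
)

// Simplified stand-in for the generated interpolation pattern.
var interpolation = regexp.MustCompile(`^\$\{var\.[a-zA-Z_][a-zA-Z0-9_]*\}$`)

// oneOf semantics: the value is valid only if exactly one branch matches.
func validDefault(v any) bool {
	matches := 0
	if _, ok := v.(bool); ok { // branch 1: the variable's own (boolean) type
		matches++
	}
	if s, ok := v.(string); ok && interpolation.MatchString(s) { // branch 2: interpolation reference
		matches++
	}
	return matches == 1
}

func main() {
	fmt.Println(validDefault(true))              // true: matches the boolean branch
	fmt.Println(validDefault("${var.other}"))    // true: matches the interpolation branch
	fmt.Println(validDefault("not-a-reference")) // false: matches neither branch
}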

View File

@@ -41,32 +41,32 @@ func TestJsonSchema(t *testing.T) {
 	resourceJob := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Job")
 	fields := []string{"name", "continuous", "tasks", "trigger"}
 	for _, field := range fields {
-		assert.NotEmpty(t, resourceJob.AnyOf[0].Properties[field].Description)
+		assert.NotEmpty(t, resourceJob.OneOf[0].Properties[field].Description)
 	}

 	// Assert descriptions were also loaded for a job task definition.
 	jobTask := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.Task")
 	fields = []string{"notebook_task", "spark_jar_task", "spark_python_task", "spark_submit_task", "description", "depends_on", "environment_key", "for_each_task", "existing_cluster_id"}
 	for _, field := range fields {
-		assert.NotEmpty(t, jobTask.AnyOf[0].Properties[field].Description)
+		assert.NotEmpty(t, jobTask.OneOf[0].Properties[field].Description)
 	}

 	// Assert descriptions are loaded for pipelines
 	pipeline := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Pipeline")
 	fields = []string{"name", "catalog", "clusters", "channel", "continuous", "development"}
 	for _, field := range fields {
-		assert.NotEmpty(t, pipeline.AnyOf[0].Properties[field].Description)
+		assert.NotEmpty(t, pipeline.OneOf[0].Properties[field].Description)
 	}

 	// Assert enum values are loaded
 	schedule := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "pipelines.RestartWindow")
-	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "MONDAY")
-	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "TUESDAY")
-	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "WEDNESDAY")
-	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "THURSDAY")
-	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "FRIDAY")
-	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "SATURDAY")
-	assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "SUNDAY")
+	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "MONDAY")
+	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "TUESDAY")
+	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "WEDNESDAY")
+	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "THURSDAY")
+	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "FRIDAY")
+	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "SATURDAY")
+	assert.Contains(t, schedule.OneOf[0].Properties["days_of_week"].Enum, "SUNDAY")

 	providers := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.GitProvider")
 	assert.Contains(t, providers.Enum, "gitHub")
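The walk helper is not part of this diff; presumably it descends the schema's nested Definitions map key by key and decodes the leaf into a typed schema. A minimal sketch under that assumption, with a hypothetical Definitions value:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down Schema with only the fields the test touches.
type Schema struct {
	Description string            `json:"description,omitempty"`
	Properties  map[string]Schema `json:"properties,omitempty"`
	OneOf       []Schema          `json:"oneOf,omitempty"`
}

// Guess at the helper's shape: descend the nested map by key, then round-trip
// the leaf through JSON to convert map[string]any into a typed Schema.
func walk(defs map[string]any, path ...string) Schema {
	var node any = defs
	for _, key := range path {
		node = node.(map[string]any)[key]
	}
	b, _ := json.Marshal(node)
	var s Schema
	_ = json.Unmarshal(b, &s)
	return s
}

func main() {
	defs := map[string]any{
		"github.com": map[string]any{
			"databricks": map[string]any{
				"resources.Job": map[string]any{
					"oneOf": []any{map[string]any{
						"properties": map[string]any{"name": map[string]any{"description": "The job name."}},
					}},
				},
			},
		},
	}
	job := walk(defs, "github.com", "databricks", "resources.Job")
	fmt.Println(job.OneOf[0].Properties["name"].Description) // The job name.
}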

File diff suppressed because it is too large.

View File

@@ -70,6 +70,9 @@ type Schema struct {
 	// Schema that must match any of the schemas in the array
 	AnyOf []Schema `json:"anyOf,omitempty"`

+	// Schema that must match one of the schemas in the array
+	OneOf []Schema `json:"oneOf,omitempty"`
+
 	// Title of the object, rendered as inline documentation in the IDE.
 	// https://json-schema.org/understanding-json-schema/reference/annotations
 	Title string `json:"title,omitempty"`
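Since both fields carry omitempty, schemas that never set OneOf serialize exactly as before, so adding the field is backward compatible. A quick check of that, using a minimal stand-in for the struct above:

package main

import (
	"encoding/json"
	"fmt"
)

// Minimal stand-in with only the fields relevant here.
type Schema struct {
	AnyOf []Schema `json:"anyOf,omitempty"`
	OneOf []Schema `json:"oneOf,omitempty"`
	Title string   `json:"title,omitempty"`
}

func main() {
	before, _ := json.Marshal(Schema{Title: "t"})
	after, _ := json.Marshal(Schema{Title: "t", OneOf: []Schema{{Title: "a"}, {Title: "b"}}})
	fmt.Println(string(before)) // {"title":"t"}
	fmt.Println(string(after))  // {"oneOf":[{"title":"a"},{"title":"b"}],"title":"t"}
}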