Compare commits

...

3 Commits

10 changed files with 2963 additions and 3096 deletions

View File

@ -40,4 +40,4 @@ vendor:
schema: schema:
@echo "✓ Generating json-schema ..." @echo "✓ Generating json-schema ..."
@go run $(shell find ./bundle/internal/schema -name "*.go" ! -name "*_test.go") ./bundle/internal/schema ./bundle/schema/jsonschema.json @go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json

View File

@ -2,7 +2,6 @@ package main
import ( import (
"bytes" "bytes"
"fmt"
"os" "os"
"reflect" "reflect"
"strings" "strings"
@ -13,6 +12,7 @@ import (
"github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge" "github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/libs/dyn/yamlloader" "github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/dyn/yamlsaver"
"github.com/databricks/cli/libs/jsonschema" "github.com/databricks/cli/libs/jsonschema"
) )
@ -25,10 +25,12 @@ type annotation struct {
} }
type annotationHandler struct { type annotationHandler struct {
ref map[string]annotation ref annotationFile
empty map[string]annotation empty annotationFile
} }
type annotationFile map[string]map[string]annotation
const Placeholder = "PLACEHOLDER" const Placeholder = "PLACEHOLDER"
// Adds annotations to the JSON schema reading from the annotation files. // Adds annotations to the JSON schema reading from the annotation files.
@ -50,7 +52,7 @@ func newAnnotationHandler(sources []string) (*annotationHandler, error) {
} }
} }
var data map[string]annotation var data annotationFile
err := convert.ToTyped(&data, prev) err := convert.ToTyped(&data, prev)
if err != nil { if err != nil {
@ -59,45 +61,43 @@ func newAnnotationHandler(sources []string) (*annotationHandler, error) {
d := &annotationHandler{} d := &annotationHandler{}
d.ref = data d.ref = data
d.empty = map[string]annotation{} d.empty = annotationFile{}
return d, nil return d, nil
} }
func (d *annotationHandler) addAnnotations(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema { func (d *annotationHandler) addAnnotations(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
refPath := jsonschema.TypePath(typ) refPath := getPath(typ)
items := map[string]*jsonschema.Schema{} shouldHandle := strings.HasPrefix(refPath, "github.com")
items[refPath] = &s if !shouldHandle {
return s
for k, v := range s.Properties {
itemRefPath := fmt.Sprintf("%s.%s", refPath, k)
items[itemRefPath] = v
} }
for k, v := range items { annotations := d.ref[refPath]
// Skipping all default types like int, string, etc. if annotations == nil {
shouldHandle := strings.HasPrefix(refPath, "github.com") annotations = map[string]annotation{}
if !shouldHandle { }
continue
}
item, ok := d.ref[k] rootTypeAnnotation, ok := annotations[RootTypeKey]
if ok {
assingAnnotation(&s, rootTypeAnnotation)
}
for k, v := range s.Properties {
item, ok := annotations[k]
if !ok { if !ok {
item = annotation{} item = annotation{}
} }
if item.Description == "" { if item.Description == "" {
item.Description = Placeholder item.Description = Placeholder
d.empty[k] = item
}
if item.Description != Placeholder {
v.Description = item.Description
}
if item.Default != nil { emptyAnnotations := d.empty[refPath]
v.Default = item.Default if emptyAnnotations == nil {
emptyAnnotations = map[string]annotation{}
d.empty[refPath] = emptyAnnotations
}
emptyAnnotations[k] = item
} }
v.MarkdownDescription = item.MarkdownDescription assingAnnotation(v, item)
v.Title = item.Title
v.Enum = item.Enum
} }
return s return s
} }
@ -109,22 +109,46 @@ func (d *annotationHandler) sync(outputPath string) error {
return err return err
} }
existing := map[string]annotation{} existing, err := yamlloader.LoadYAML(outputPath, bytes.NewBuffer(file))
err = yaml.Unmarshal(file, &existing) if err != nil {
return err
}
emptyB, err := yaml.Marshal(d.empty)
if err != nil {
return err
}
for k, v := range d.empty { empty, err := yamlloader.LoadYAML("", bytes.NewBuffer(emptyB))
existing[k] = v
}
if err != nil { if err != nil {
return err return err
} }
b, err := yaml.Marshal(existing) mergedFile, err := merge.Merge(existing, empty)
if err != nil { if err != nil {
return err return err
} }
err = os.WriteFile(outputPath, b, 0644)
saver := yamlsaver.NewSaver()
config, _ := mergedFile.AsMap()
err = saver.SaveAsYAML(config, outputPath, true)
if err != nil { if err != nil {
return err return err
} }
return nil return nil
} }
func getPath(typ reflect.Type) string {
return typ.PkgPath() + "." + typ.Name()
}
// assingAnnotation copies annotation metadata onto a JSON schema node.
// A description equal to the Placeholder sentinel is treated as "not yet
// written" and is NOT copied, so the schema keeps whatever description it
// already had. Default is only overwritten when the annotation provides one,
// while MarkdownDescription, Title, and Enum are always assigned (possibly
// to their zero values).
// NOTE(review): the name is misspelled ("assing" → "assign"); it is kept
// as-is because call sites elsewhere in this file reference it.
func assingAnnotation(s *jsonschema.Schema, a annotation) {
	if desc := a.Description; desc != Placeholder {
		s.Description = desc
	}
	if def := a.Default; def != nil {
		s.Default = def
	}
	s.MarkdownDescription = a.MarkdownDescription
	s.Title = a.Title
	s.Enum = a.Enum
}

View File

@ -1,325 +1,289 @@
github.com/databricks/cli/bundle/config.Artifact: github.com/databricks/cli/bundle/config.Artifact:
description: Defines the attributes to build an artifact. build:
title: Artifact description: An optional set of non-default build commands that you want to run locally before deployment. For Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment. To specify multiple build commands, separate each command with double-ampersand (&&) characters.
github.com/databricks/cli/bundle/config.Artifact.build: executable:
description: The command to build the artifact. description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Artifact.executable: files:
description: PLACEHOLDER description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Artifact.files: path:
description: PLACEHOLDER description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Artifact.path: type:
description: PLACEHOLDER description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Artifact.type:
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.ArtifactFile: github.com/databricks/cli/bundle/config.ArtifactFile:
description: PLACEHOLDER source:
github.com/databricks/cli/bundle/config.ArtifactFile.source: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.ArtifactType:
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Bundle: github.com/databricks/cli/bundle/config.Bundle:
description: PLACEHOLDER cluster_id:
github.com/databricks/cli/bundle/config.Bundle.cluster_id: description: PLACEHOLDER
description: PLACEHOLDER compute_id:
github.com/databricks/cli/bundle/config.Bundle.compute_id: description: PLACEHOLDER
description: PLACEHOLDER databricks_cli_version:
github.com/databricks/cli/bundle/config.Bundle.databricks_cli_version: description: PLACEHOLDER
description: PLACEHOLDER deployment:
github.com/databricks/cli/bundle/config.Bundle.deployment: description: PLACEHOLDER
description: PLACEHOLDER git:
github.com/databricks/cli/bundle/config.Bundle.git: description: PLACEHOLDER
description: PLACEHOLDER name:
github.com/databricks/cli/bundle/config.Bundle.name: description: PLACEHOLDER
description: PLACEHOLDER uuid:
github.com/databricks/cli/bundle/config.Bundle.uuid: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Command:
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Deployment: github.com/databricks/cli/bundle/config.Deployment:
description: PLACEHOLDER fail_on_active_runs:
github.com/databricks/cli/bundle/config.Deployment.fail_on_active_runs: description: PLACEHOLDER
description: PLACEHOLDER lock:
github.com/databricks/cli/bundle/config.Deployment.lock: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Experimental: github.com/databricks/cli/bundle/config.Experimental:
description: PLACEHOLDER pydabs:
github.com/databricks/cli/bundle/config.Experimental.pydabs: description: PLACEHOLDER
description: PLACEHOLDER python_wheel_wrapper:
github.com/databricks/cli/bundle/config.Experimental.python_wheel_wrapper: description: PLACEHOLDER
description: PLACEHOLDER scripts:
github.com/databricks/cli/bundle/config.Experimental.scripts: description: PLACEHOLDER
description: PLACEHOLDER use_legacy_run_as:
github.com/databricks/cli/bundle/config.Experimental.use_legacy_run_as: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Git: github.com/databricks/cli/bundle/config.Git:
description: PLACEHOLDER branch:
github.com/databricks/cli/bundle/config.Git.branch: description: PLACEHOLDER
description: PLACEHOLDER origin_url:
github.com/databricks/cli/bundle/config.Git.origin_url: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Lock: github.com/databricks/cli/bundle/config.Lock:
description: PLACEHOLDER enabled:
github.com/databricks/cli/bundle/config.Lock.enabled: description: PLACEHOLDER
description: PLACEHOLDER force:
github.com/databricks/cli/bundle/config.Lock.force: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Mode:
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Presets: github.com/databricks/cli/bundle/config.Presets:
description: PLACEHOLDER jobs_max_concurrent_runs:
github.com/databricks/cli/bundle/config.Presets.jobs_max_concurrent_runs: description: PLACEHOLDER
description: PLACEHOLDER name_prefix:
github.com/databricks/cli/bundle/config.Presets.name_prefix: description: PLACEHOLDER
description: PLACEHOLDER pipelines_development:
github.com/databricks/cli/bundle/config.Presets.pipelines_development: description: PLACEHOLDER
description: PLACEHOLDER source_linked_deployment:
github.com/databricks/cli/bundle/config.Presets.source_linked_deployment: description: PLACEHOLDER
description: PLACEHOLDER tags:
github.com/databricks/cli/bundle/config.Presets.tags: description: PLACEHOLDER
description: PLACEHOLDER trigger_pause_status:
github.com/databricks/cli/bundle/config.Presets.trigger_pause_status: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.PyDABs: github.com/databricks/cli/bundle/config.PyDABs:
description: PLACEHOLDER enabled:
github.com/databricks/cli/bundle/config.PyDABs.enabled: description: PLACEHOLDER
description: PLACEHOLDER import:
github.com/databricks/cli/bundle/config.PyDABs.import: description: PLACEHOLDER
description: PLACEHOLDER venv_path:
github.com/databricks/cli/bundle/config.PyDABs.venv_path: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Resources: github.com/databricks/cli/bundle/config.Resources:
description: PLACEHOLDER clusters:
github.com/databricks/cli/bundle/config.Resources.clusters: description: PLACEHOLDER
description: PLACEHOLDER dashboards:
github.com/databricks/cli/bundle/config.Resources.dashboards: description: PLACEHOLDER
description: PLACEHOLDER experiments:
github.com/databricks/cli/bundle/config.Resources.experiments: description: PLACEHOLDER
description: PLACEHOLDER jobs:
github.com/databricks/cli/bundle/config.Resources.jobs: description: PLACEHOLDER
description: PLACEHOLDER model_serving_endpoints:
github.com/databricks/cli/bundle/config.Resources.model_serving_endpoints: description: PLACEHOLDER
description: PLACEHOLDER models:
github.com/databricks/cli/bundle/config.Resources.models: description: PLACEHOLDER
description: PLACEHOLDER pipelines:
github.com/databricks/cli/bundle/config.Resources.pipelines: description: PLACEHOLDER
description: PLACEHOLDER quality_monitors:
github.com/databricks/cli/bundle/config.Resources.quality_monitors: description: PLACEHOLDER
description: PLACEHOLDER registered_models:
github.com/databricks/cli/bundle/config.Resources.registered_models: description: PLACEHOLDER
description: PLACEHOLDER schemas:
github.com/databricks/cli/bundle/config.Resources.schemas: description: PLACEHOLDER
description: PLACEHOLDER volumes:
github.com/databricks/cli/bundle/config.Resources.volumes: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Root: github.com/databricks/cli/bundle/config.Root:
description: PLACEHOLDER artifacts:
github.com/databricks/cli/bundle/config.Root.artifacts: description: Defines the attributes to build an artifact
description: PLACEHOLDER bundle:
github.com/databricks/cli/bundle/config.Root.bundle: description: PLACEHOLDER
description: PLACEHOLDER experimental:
github.com/databricks/cli/bundle/config.Root.experimental: description: PLACEHOLDER
description: PLACEHOLDER include:
github.com/databricks/cli/bundle/config.Root.include: description: PLACEHOLDER
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config.Root.permissions: description: PLACEHOLDER
description: PLACEHOLDER presets:
github.com/databricks/cli/bundle/config.Root.presets: description: PLACEHOLDER
description: PLACEHOLDER resources:
github.com/databricks/cli/bundle/config.Root.resources: description: PLACEHOLDER
description: PLACEHOLDER run_as:
github.com/databricks/cli/bundle/config.Root.run_as: description: PLACEHOLDER
description: PLACEHOLDER sync:
github.com/databricks/cli/bundle/config.Root.sync: description: PLACEHOLDER
description: PLACEHOLDER targets:
github.com/databricks/cli/bundle/config.Root.targets: description: PLACEHOLDER
description: PLACEHOLDER variables:
github.com/databricks/cli/bundle/config.Root.variables: description: PLACEHOLDER
description: PLACEHOLDER workspace:
github.com/databricks/cli/bundle/config.Root.workspace: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Sync: github.com/databricks/cli/bundle/config.Sync:
description: PLACEHOLDER exclude:
github.com/databricks/cli/bundle/config.Sync.exclude: description: PLACEHOLDER
description: PLACEHOLDER include:
github.com/databricks/cli/bundle/config.Sync.include: description: PLACEHOLDER
description: PLACEHOLDER paths:
github.com/databricks/cli/bundle/config.Sync.paths: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Target: github.com/databricks/cli/bundle/config.Target:
description: PLACEHOLDER artifacts:
github.com/databricks/cli/bundle/config.Target.artifacts: description: PLACEHOLDER
description: PLACEHOLDER bundle:
github.com/databricks/cli/bundle/config.Target.bundle: description: PLACEHOLDER
description: PLACEHOLDER cluster_id:
github.com/databricks/cli/bundle/config.Target.cluster_id: description: PLACEHOLDER
description: PLACEHOLDER compute_id:
github.com/databricks/cli/bundle/config.Target.compute_id: description: PLACEHOLDER
description: PLACEHOLDER default:
github.com/databricks/cli/bundle/config.Target.default: description: PLACEHOLDER
description: PLACEHOLDER git:
github.com/databricks/cli/bundle/config.Target.git: description: PLACEHOLDER
description: PLACEHOLDER mode:
github.com/databricks/cli/bundle/config.Target.mode: description: PLACEHOLDER
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config.Target.permissions: description: PLACEHOLDER
description: PLACEHOLDER presets:
github.com/databricks/cli/bundle/config.Target.presets: description: PLACEHOLDER
description: PLACEHOLDER resources:
github.com/databricks/cli/bundle/config.Target.resources: description: PLACEHOLDER
description: PLACEHOLDER run_as:
github.com/databricks/cli/bundle/config.Target.run_as: description: PLACEHOLDER
description: PLACEHOLDER sync:
github.com/databricks/cli/bundle/config.Target.sync: description: PLACEHOLDER
description: PLACEHOLDER variables:
github.com/databricks/cli/bundle/config.Target.variables: description: PLACEHOLDER
description: PLACEHOLDER workspace:
github.com/databricks/cli/bundle/config.Target.workspace: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config.Workspace: github.com/databricks/cli/bundle/config.Workspace:
description: PLACEHOLDER artifact_path:
github.com/databricks/cli/bundle/config.Workspace.artifact_path: description: PLACEHOLDER
description: PLACEHOLDER auth_type:
github.com/databricks/cli/bundle/config.Workspace.auth_type: description: PLACEHOLDER
description: PLACEHOLDER azure_client_id:
github.com/databricks/cli/bundle/config.Workspace.azure_client_id: description: PLACEHOLDER
description: PLACEHOLDER azure_environment:
github.com/databricks/cli/bundle/config.Workspace.azure_environment: description: PLACEHOLDER
description: PLACEHOLDER azure_login_app_id:
github.com/databricks/cli/bundle/config.Workspace.azure_login_app_id: description: PLACEHOLDER
description: PLACEHOLDER azure_tenant_id:
github.com/databricks/cli/bundle/config.Workspace.azure_tenant_id: description: PLACEHOLDER
description: PLACEHOLDER azure_use_msi:
github.com/databricks/cli/bundle/config.Workspace.azure_use_msi: description: PLACEHOLDER
description: PLACEHOLDER azure_workspace_resource_id:
github.com/databricks/cli/bundle/config.Workspace.azure_workspace_resource_id: description: PLACEHOLDER
description: PLACEHOLDER client_id:
github.com/databricks/cli/bundle/config.Workspace.client_id: description: PLACEHOLDER
description: PLACEHOLDER file_path:
github.com/databricks/cli/bundle/config.Workspace.file_path: description: PLACEHOLDER
description: PLACEHOLDER google_service_account:
github.com/databricks/cli/bundle/config.Workspace.google_service_account: description: PLACEHOLDER
description: PLACEHOLDER host:
github.com/databricks/cli/bundle/config.Workspace.host: description: PLACEHOLDER
description: PLACEHOLDER profile:
github.com/databricks/cli/bundle/config.Workspace.profile: description: PLACEHOLDER
description: PLACEHOLDER resource_path:
github.com/databricks/cli/bundle/config.Workspace.resource_path: description: PLACEHOLDER
description: PLACEHOLDER root_path:
github.com/databricks/cli/bundle/config.Workspace.root_path: description: PLACEHOLDER
description: PLACEHOLDER state_path:
github.com/databricks/cli/bundle/config.Workspace.state_path: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Grant: github.com/databricks/cli/bundle/config/resources.Grant:
description: PLACEHOLDER principal:
github.com/databricks/cli/bundle/config/resources.Grant.principal: description: PLACEHOLDER
description: PLACEHOLDER privileges:
github.com/databricks/cli/bundle/config/resources.Grant.privileges: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Permission: github.com/databricks/cli/bundle/config/resources.Permission:
description: PLACEHOLDER group_name:
github.com/databricks/cli/bundle/config/resources.Permission.group_name: description: PLACEHOLDER
description: PLACEHOLDER level:
github.com/databricks/cli/bundle/config/resources.Permission.level: description: PLACEHOLDER
description: PLACEHOLDER service_principal_name:
github.com/databricks/cli/bundle/config/resources.Permission.service_principal_name: description: PLACEHOLDER
description: PLACEHOLDER user_name:
github.com/databricks/cli/bundle/config/resources.Permission.user_name: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/variable.Lookup: github.com/databricks/cli/bundle/config/variable.Lookup:
description: PLACEHOLDER alert:
github.com/databricks/cli/bundle/config/variable.Lookup.alert: description: PLACEHOLDER
description: PLACEHOLDER cluster:
github.com/databricks/cli/bundle/config/variable.Lookup.cluster: description: PLACEHOLDER
description: PLACEHOLDER cluster_policy:
github.com/databricks/cli/bundle/config/variable.Lookup.cluster_policy: description: PLACEHOLDER
description: PLACEHOLDER dashboard:
github.com/databricks/cli/bundle/config/variable.Lookup.dashboard: description: PLACEHOLDER
description: PLACEHOLDER instance_pool:
github.com/databricks/cli/bundle/config/variable.Lookup.instance_pool: description: PLACEHOLDER
description: PLACEHOLDER job:
github.com/databricks/cli/bundle/config/variable.Lookup.job: description: PLACEHOLDER
description: PLACEHOLDER metastore:
github.com/databricks/cli/bundle/config/variable.Lookup.metastore: description: PLACEHOLDER
description: PLACEHOLDER notification_destination:
github.com/databricks/cli/bundle/config/variable.Lookup.notification_destination: description: PLACEHOLDER
description: PLACEHOLDER pipeline:
github.com/databricks/cli/bundle/config/variable.Lookup.pipeline: description: PLACEHOLDER
description: PLACEHOLDER query:
github.com/databricks/cli/bundle/config/variable.Lookup.query: description: PLACEHOLDER
description: PLACEHOLDER service_principal:
github.com/databricks/cli/bundle/config/variable.Lookup.service_principal: description: PLACEHOLDER
description: PLACEHOLDER warehouse:
github.com/databricks/cli/bundle/config/variable.Lookup.warehouse: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/variable.TargetVariable: github.com/databricks/cli/bundle/config/variable.TargetVariable:
description: PLACEHOLDER default:
github.com/databricks/cli/bundle/config/variable.TargetVariable.default: description: PLACEHOLDER
description: PLACEHOLDER description:
github.com/databricks/cli/bundle/config/variable.TargetVariable.description: description: PLACEHOLDER
description: PLACEHOLDER lookup:
github.com/databricks/cli/bundle/config/variable.TargetVariable.lookup: description: PLACEHOLDER
description: PLACEHOLDER type:
github.com/databricks/cli/bundle/config/variable.TargetVariable.type: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/variable.Variable: github.com/databricks/cli/bundle/config/variable.Variable:
description: PLACEHOLDER default:
github.com/databricks/cli/bundle/config/variable.Variable.default: description: PLACEHOLDER
description: PLACEHOLDER description:
github.com/databricks/cli/bundle/config/variable.Variable.description: description: PLACEHOLDER
description: PLACEHOLDER lookup:
github.com/databricks/cli/bundle/config/variable.Variable.lookup: description: PLACEHOLDER
description: PLACEHOLDER type:
github.com/databricks/cli/bundle/config/variable.Variable.type: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/variable.VariableType:
description: PLACEHOLDER
github.com/databricks/cli/libs/exec.ExecutableType:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindowDaysOfWeek:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig: github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
description: PLACEHOLDER ai21labs_api_key:
github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig.ai21labs_api_key: description: PLACEHOLDER
description: PLACEHOLDER ai21labs_api_key_plaintext:
github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig.ai21labs_api_key_plaintext: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig: github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig:
description: PLACEHOLDER private_key:
github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig.private_key: description: PLACEHOLDER
description: PLACEHOLDER private_key_plaintext:
github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig.private_key_plaintext: description: PLACEHOLDER
description: PLACEHOLDER project_id:
github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig.project_id: description: PLACEHOLDER
description: PLACEHOLDER region:
github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig.region: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig: github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig:
description: PLACEHOLDER microsoft_entra_client_id:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.microsoft_entra_client_id: description: PLACEHOLDER
description: PLACEHOLDER microsoft_entra_client_secret:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.microsoft_entra_client_secret: description: PLACEHOLDER
description: PLACEHOLDER microsoft_entra_client_secret_plaintext:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.microsoft_entra_client_secret_plaintext: description: PLACEHOLDER
description: PLACEHOLDER microsoft_entra_tenant_id:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.microsoft_entra_tenant_id: description: PLACEHOLDER
description: PLACEHOLDER openai_api_base:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.openai_api_base: description: PLACEHOLDER
description: PLACEHOLDER openai_api_key:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.openai_api_key: description: PLACEHOLDER
description: PLACEHOLDER openai_api_key_plaintext:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.openai_api_key_plaintext: description: PLACEHOLDER
description: PLACEHOLDER openai_api_type:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.openai_api_type: description: PLACEHOLDER
description: PLACEHOLDER openai_api_version:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.openai_api_version: description: PLACEHOLDER
description: PLACEHOLDER openai_deployment_name:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.openai_deployment_name: description: PLACEHOLDER
description: PLACEHOLDER openai_organization:
github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig.openai_organization: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig: github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig:
description: PLACEHOLDER palm_api_key:
github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig.palm_api_key: description: PLACEHOLDER
description: PLACEHOLDER palm_api_key_plaintext:
github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig.palm_api_key_plaintext: description: PLACEHOLDER
description: PLACEHOLDER

File diff suppressed because it is too large Load Diff

View File

@ -1,344 +1,112 @@
github.com/databricks/cli/bundle/config/resources.Cluster: github.com/databricks/cli/bundle/config/resources.Cluster:
description: PLACEHOLDER data_security_mode:
github.com/databricks/cli/bundle/config/resources.Cluster.data_security_mode: description: PLACEHOLDER
description: PLACEHOLDER docker_image:
github.com/databricks/cli/bundle/config/resources.Cluster.docker_image: description: PLACEHOLDER
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config/resources.Cluster.permissions: description: PLACEHOLDER
description: PLACEHOLDER runtime_engine:
github.com/databricks/cli/bundle/config/resources.Cluster.runtime_engine: description: PLACEHOLDER
description: PLACEHOLDER workload_type:
github.com/databricks/cli/bundle/config/resources.Cluster.workload_type: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Dashboard: github.com/databricks/cli/bundle/config/resources.Dashboard:
description: PLACEHOLDER embed_credentials:
github.com/databricks/cli/bundle/config/resources.Dashboard.embed_credentials: description: PLACEHOLDER
description: PLACEHOLDER file_path:
github.com/databricks/cli/bundle/config/resources.Dashboard.file_path: description: PLACEHOLDER
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config/resources.Dashboard.permissions: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Job: github.com/databricks/cli/bundle/config/resources.Job:
description: PLACEHOLDER health:
github.com/databricks/cli/bundle/config/resources.Job.health: description: PLACEHOLDER
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config/resources.Job.permissions: description: PLACEHOLDER
description: PLACEHOLDER run_as:
github.com/databricks/cli/bundle/config/resources.Job.run_as: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowExperiment: github.com/databricks/cli/bundle/config/resources.MlflowExperiment:
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config/resources.MlflowExperiment.permissions: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.MlflowModel: github.com/databricks/cli/bundle/config/resources.MlflowModel:
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config/resources.MlflowModel.permissions: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint.permissions: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Pipeline: github.com/databricks/cli/bundle/config/resources.Pipeline:
description: PLACEHOLDER permissions:
github.com/databricks/cli/bundle/config/resources.Pipeline.permissions: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.QualityMonitor: github.com/databricks/cli/bundle/config/resources.QualityMonitor:
description: PLACEHOLDER table_name:
github.com/databricks/cli/bundle/config/resources.QualityMonitor.table_name: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.RegisteredModel: github.com/databricks/cli/bundle/config/resources.RegisteredModel:
description: PLACEHOLDER grants:
github.com/databricks/cli/bundle/config/resources.RegisteredModel.grants: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Schema: github.com/databricks/cli/bundle/config/resources.Schema:
description: PLACEHOLDER grants:
github.com/databricks/cli/bundle/config/resources.Schema.grants: description: PLACEHOLDER
description: PLACEHOLDER properties:
github.com/databricks/cli/bundle/config/resources.Schema.properties: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Volume: github.com/databricks/cli/bundle/config/resources.Volume:
description: PLACEHOLDER grants:
github.com/databricks/cli/bundle/config/resources.Volume.grants: description: PLACEHOLDER
description: PLACEHOLDER volume_type:
github.com/databricks/cli/bundle/config/resources.Volume.volume_type: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.VolumeType:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.AutoScale:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes: github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes:
description: PLACEHOLDER availability:
github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes.availability: description: PLACEHOLDER
description: PLACEHOLDER ebs_volume_type:
github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes.ebs_volume_type: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes: github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes:
description: PLACEHOLDER availability:
github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes.availability: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec: github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec:
description: PLACEHOLDER data_security_mode:
github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec.data_security_mode: description: PLACEHOLDER
description: PLACEHOLDER docker_image:
github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec.docker_image: description: PLACEHOLDER
description: PLACEHOLDER runtime_engine:
github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec.runtime_engine: description: PLACEHOLDER
description: PLACEHOLDER workload_type:
github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec.workload_type: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.DockerImage: github.com/databricks/databricks-sdk-go/service/compute.DockerImage:
description: PLACEHOLDER basic_auth:
github.com/databricks/databricks-sdk-go/service/compute.DockerImage.basic_auth: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes: github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes:
description: PLACEHOLDER availability:
github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes.availability: description: PLACEHOLDER
description: PLACEHOLDER github.com/databricks/databricks-sdk-go/service/jobs.GitSource:
github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo: git_snapshot:
description: PLACEHOLDER description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.Library:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.WorkloadType:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/dashboards.LifecycleState:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Condition:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Continuous:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.DbtTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Format:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.GitProvider:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.GitSource.git_snapshot:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobCluster:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobDeployment:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment: github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment:
description: PLACEHOLDER spec:
github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment.spec: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule: github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule:
description: PLACEHOLDER metric:
github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule.metric: description: PLACEHOLDER
description: PLACEHOLDER op:
github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule.op: description: PLACEHOLDER
description: PLACEHOLDER github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules:
github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules.rules: rules:
description: PLACEHOLDER description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfigurationTimeUnit:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask: github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask:
description: PLACEHOLDER python_named_params:
github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask.python_named_params: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SqlTask:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Task: github.com/databricks/databricks-sdk-go/service/jobs.Task:
description: PLACEHOLDER health:
github.com/databricks/databricks-sdk-go/service/jobs.Task.health: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings: github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings:
description: PLACEHOLDER table_update:
github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings.table_update: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.Webhook: github.com/databricks/databricks-sdk-go/service/jobs.Webhook:
description: PLACEHOLDER id:
github.com/databricks/databricks-sdk-go/service/jobs.Webhook.id: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/ml.ModelTag:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/ml.ModelVersion:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/ml.ModelVersionTag:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger: github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger:
description: PLACEHOLDER quartz_cron_schedule:
github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger.quartz_cron_schedule: description: PLACEHOLDER
description: PLACEHOLDER timezone_id:
github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger.timezone_id: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.Filters:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.Notifications:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineDeployment:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger: github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
description: PLACEHOLDER cron:
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger.cron: description: PLACEHOLDER
description: PLACEHOLDER manual:
github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger.manual: description: PLACEHOLDER
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindow:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.CohereConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.EndpointTag:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.ExternalModel:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.RateLimit:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.Route:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
description: PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig:
description: PLACEHOLDER

View File

@ -4,9 +4,13 @@ import (
"io" "io"
"os" "os"
"path" "path"
"reflect"
"runtime" "runtime"
"testing" "testing"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/jsonschema"
"github.com/ghodss/yaml"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -61,3 +65,48 @@ func TestRequiredAnnotationsForNewFields(t *testing.T) {
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, string(original), string(copied), "Missing JSON-schema descriptions for new config fields in bundle/internal/schema/annotations.yml") assert.Equal(t, string(original), string(copied), "Missing JSON-schema descriptions for new config fields in bundle/internal/schema/annotations.yml")
} }
// TestNoDetachedAnnotations checks that every type key listed in the
// annotation files still corresponds to a type reachable from config.Root.
// A "detached" key usually means a type was renamed, moved to another
// package, or deleted without regenerating the schema annotations.
func TestNoDetachedAnnotations(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip()
	}

	files := []string{
		"annotations.yml",
		"annotations_openapi.yml",
		"annotations_openapi_overrides.yml",
	}

	// Collect every type key mentioned across all annotation files.
	// The map is used as a set; the bool value is never read.
	types := map[string]bool{}
	for _, file := range files {
		annotations, err := getAnnotations(file)
		assert.NoError(t, err)
		for k := range annotations {
			types[k] = false
		}
	}

	// Walk the schema generated from config.Root and remove every type we
	// encounter; whatever remains afterwards has no matching type in the
	// config tree and is therefore detached.
	_, err := jsonschema.FromType(reflect.TypeOf(config.Root{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
		func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
			delete(types, getPath(typ))
			return s
		},
	})
	assert.NoError(t, err)

	// Report each leftover key individually for readable failure output.
	// NOTE: the message names `config.Root`, matching the type walked above.
	for typ := range types {
		t.Errorf("Type `%s` in annotations file is not found in `config.Root` type", typ)
	}
	assert.Empty(t, types, "Detached annotations found, regenerate schema and check for package path changes")
}
// getAnnotations reads the YAML annotation file at path and decodes it
// into an annotationFile map. On a read failure it returns a nil map and
// the error; on an unmarshal failure it returns whatever was decoded so
// far along with that error.
func getAnnotations(path string) (annotationFile, error) {
	contents, readErr := os.ReadFile(path)
	if readErr != nil {
		return nil, readErr
	}

	// Zero value (nil map) is intentional: an empty YAML document yields
	// a nil annotationFile, matching callers that only range over it.
	var parsed annotationFile
	parseErr := yaml.Unmarshal(contents, &parsed)
	return parsed, parseErr
}

View File

@ -26,6 +26,8 @@ type openapiParser struct {
ref map[string]jsonschema.Schema ref map[string]jsonschema.Schema
} }
const RootTypeKey = "_"
func newParser(path string) (*openapiParser, error) { func newParser(path string) (*openapiParser, error) {
b, err := os.ReadFile(path) b, err := os.ReadFile(path)
if err != nil { if err != nil {
@ -93,8 +95,8 @@ func (p *openapiParser) findRef(typ reflect.Type) (jsonschema.Schema, bool) {
// Use the OpenAPI spec to load descriptions for the given type. // Use the OpenAPI spec to load descriptions for the given type.
func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overridesPath string) error { func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overridesPath string) error {
annotations := map[string]annotation{} annotations := annotationFile{}
overrides := map[string]annotation{} overrides := annotationFile{}
b, err := os.ReadFile(overridesPath) b, err := os.ReadFile(overridesPath)
if err != nil { if err != nil {
@ -105,7 +107,7 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid
return err return err
} }
if overrides == nil { if overrides == nil {
overrides = map[string]annotation{} overrides = annotationFile{}
} }
_, err = jsonschema.FromType(typ, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{ _, err = jsonschema.FromType(typ, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
@ -115,25 +117,22 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid
return s return s
} }
basePath := jsonschema.TypePath(typ) basePath := getPath(typ)
annotations[basePath] = annotation{ pkg := map[string]annotation{}
Description: ref.Description, annotations[basePath] = pkg
Enum: ref.Enum,
} if ref.Description != "" || ref.Enum != nil {
if ref.Description == "" { pkg[RootTypeKey] = annotation{Description: ref.Description, Enum: ref.Enum}
addEmptyOverride(basePath, overrides)
} }
for k := range s.Properties { for k := range s.Properties {
itemPath := fmt.Sprintf("%s.%s", basePath, k)
if refProp, ok := ref.Properties[k]; ok { if refProp, ok := ref.Properties[k]; ok {
annotations[itemPath] = annotation{Description: refProp.Description, Enum: refProp.Enum} pkg[k] = annotation{Description: refProp.Description, Enum: refProp.Enum}
if refProp.Description == "" { if refProp.Description == "" {
addEmptyOverride(itemPath, overrides) addEmptyOverride(k, basePath, overrides)
} }
} else { } else {
addEmptyOverride(itemPath, overrides) addEmptyOverride(k, basePath, overrides)
} }
} }
return s return s
@ -166,17 +165,22 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid
return nil return nil
} }
func addEmptyOverride(path string, overrides map[string]annotation) { func addEmptyOverride(key, pkg string, overridesFile annotationFile) {
if overrides[path].Description == "" { if overridesFile[pkg] == nil {
overrides[path] = annotation{Description: Placeholder} overridesFile[pkg] = map[string]annotation{}
} }
a, ok := overrides[path] overrides := overridesFile[pkg]
if overrides[key].Description == "" {
overrides[key] = annotation{Description: Placeholder}
}
a, ok := overrides[key]
if !ok { if !ok {
a = annotation{} a = annotation{}
} }
if a.Description == "" { if a.Description == "" {
a.Description = Placeholder a.Description = Placeholder
} }
overrides[path] = a overrides[key] = a
} }

View File

@ -935,10 +935,9 @@
"anyOf": [ "anyOf": [
{ {
"type": "object", "type": "object",
"description": "Defines the attributes to build an artifact.",
"properties": { "properties": {
"build": { "build": {
"description": "The command to build the artifact.", "description": "An optional set of non-default build commands that you want to run locally before deployment. For Python wheel builds, the Databricks CLI assumes that it can find a local install of the Python wheel package to run builds, and it runs the command python setup.py bdist_wheel by default during each bundle deployment. To specify multiple build commands, separate each command with double-ampersand (\u0026\u0026) characters.",
"$ref": "#/$defs/string" "$ref": "#/$defs/string"
}, },
"executable": { "executable": {
@ -957,8 +956,7 @@
"additionalProperties": false, "additionalProperties": false,
"required": [ "required": [
"type" "type"
], ]
"title": "Artifact"
}, },
{ {
"type": "string", "type": "string",
@ -6159,6 +6157,7 @@
"type": "object", "type": "object",
"properties": { "properties": {
"artifacts": { "artifacts": {
"description": "Defines the attributes to build an artifact",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact" "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact"
}, },
"bundle": { "bundle": {

View File

@ -52,7 +52,7 @@ func (c *constructor) Definitions() map[string]any {
// Remove the root type from the definitions. We don't need to include it in // Remove the root type from the definitions. We don't need to include it in
// the definitions because it will be inlined as the root of the generated JSON schema. // the definitions because it will be inlined as the root of the generated JSON schema.
delete(defs, TypePath(c.root)) delete(defs, typePath(c.root))
if len(defs) == 0 { if len(defs) == 0 {
return nil return nil
@ -106,14 +106,14 @@ func FromType(typ reflect.Type, fns []func(typ reflect.Type, s Schema) Schema) (
} }
} }
res := c.definitions[TypePath(typ)] res := c.definitions[typePath(typ)]
res.Definitions = c.Definitions() res.Definitions = c.Definitions()
return res, nil return res, nil
} }
// TypePath computes a unique string representation of the type. $ref in the generated // typePath computes a unique string representation of the type. $ref in the generated
// JSON schema will refer to this path. See TestTypePath for examples outputs. // JSON schema will refer to this path. See TestTypePath for examples outputs.
func TypePath(typ reflect.Type) string { func typePath(typ reflect.Type) string {
// Pointers have a typ.Name() of "". Dereference them to get the underlying type. // Pointers have a typ.Name() of "". Dereference them to get the underlying type.
for typ.Kind() == reflect.Ptr { for typ.Kind() == reflect.Ptr {
typ = typ.Elem() typ = typ.Elem()
@ -125,7 +125,7 @@ func TypePath(typ reflect.Type) string {
// Recursively call typePath, to handle slices of slices / maps. // Recursively call typePath, to handle slices of slices / maps.
if typ.Kind() == reflect.Slice { if typ.Kind() == reflect.Slice {
return path.Join("slice", TypePath(typ.Elem())) return path.Join("slice", typePath(typ.Elem()))
} }
if typ.Kind() == reflect.Map { if typ.Kind() == reflect.Map {
@ -134,7 +134,7 @@ func TypePath(typ reflect.Type) string {
} }
// Recursively call typePath, to handle maps of maps / slices. // Recursively call typePath, to handle maps of maps / slices.
return path.Join("map", TypePath(typ.Elem())) return path.Join("map", typePath(typ.Elem()))
} }
switch { switch {
@ -157,7 +157,7 @@ func (c *constructor) walk(typ reflect.Type) (string, error) {
typ = typ.Elem() typ = typ.Elem()
} }
typPath := TypePath(typ) typPath := typePath(typ)
// Return early if the type has already been seen, to avoid infinite recursion. // Return early if the type has already been seen, to avoid infinite recursion.
if _, ok := c.seen[typPath]; ok { if _, ok := c.seen[typPath]; ok {

View File

@ -510,12 +510,12 @@ func TestTypePath(t *testing.T) {
for _, tc := range tcases { for _, tc := range tcases {
t.Run(tc.typ.String(), func(t *testing.T) { t.Run(tc.typ.String(), func(t *testing.T) {
assert.Equal(t, tc.path, TypePath(tc.typ)) assert.Equal(t, tc.path, typePath(tc.typ))
}) })
} }
// Maps with non-string keys should panic. // Maps with non-string keys should panic.
assert.PanicsWithValue(t, "found map with non-string key: int", func() { assert.PanicsWithValue(t, "found map with non-string key: int", func() {
TypePath(reflect.TypeOf(map[int]int{})) typePath(reflect.TypeOf(map[int]int{}))
}) })
} }