mirror of https://github.com/databricks/cli.git
Compare commits
5 Commits
e55df7b207...2d62c0ceb7
Author | SHA1 | Date
---|---|---
Shreyas Goenka | 2d62c0ceb7 |
Shreyas Goenka | c69e6d97df |
Shreyas Goenka | 8cd26318e8 |
Shreyas Goenka | 75a571a56d |
Shreyas Goenka | 1dd399f3fe |
```diff
@@ -25,16 +25,19 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.
 	case jsonschema.ArrayType, jsonschema.ObjectType:
 		// arrays and objects can have complex variable values specified.
 		return jsonschema.Schema{
-			AnyOf: []jsonschema.Schema{s, {
-				Type:    jsonschema.StringType,
-				Pattern: interpolationPattern("var"),
-			}},
+			AnyOf: []jsonschema.Schema{
+				s,
+				{
+					Type:    jsonschema.StringType,
+					Pattern: interpolationPattern("var"),
+				}},
 		}
 	case jsonschema.IntegerType, jsonschema.NumberType, jsonschema.BooleanType:
 		// primitives can have variable values, or references like ${bundle.xyz}
 		// or ${workspace.xyz}
 		return jsonschema.Schema{
-			AnyOf: []jsonschema.Schema{s,
+			AnyOf: []jsonschema.Schema{
+				s,
 				{Type: jsonschema.StringType, Pattern: interpolationPattern("resources")},
 				{Type: jsonschema.StringType, Pattern: interpolationPattern("bundle")},
 				{Type: jsonschema.StringType, Pattern: interpolationPattern("workspace")},
```
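The `interpolationPattern` helper referenced in this hunk returns a regex string for the given prefix; its definition is not part of this diff. A minimal sketch of what matching such a pattern looks like, where the regex itself is an illustrative approximation rather than the CLI's actual pattern:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Illustrative approximation of a pattern interpolationPattern("var")
	// might produce; the real regex lives elsewhere in the CLI source.
	varPattern := regexp.MustCompile(`^\$\{var(\.[a-zA-Z_][a-zA-Z0-9_]*)+\}$`)

	fmt.Println(varPattern.MatchString("${var.cluster_id}")) // true
	fmt.Println(varPattern.MatchString("43"))                // false: a plain literal
}
```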
```diff
@@ -1,6 +1,6 @@
-package generated
+package schema
 
 import _ "embed"
 
 //go:embed jsonschema.json
-var BundleSchema []byte
+var Bytes []byte
```
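After the rename, consumers read the embedded schema through `schema.Bytes`. A small sanity-check sketch, assuming only the standard library plus the package above:

```go
package main

import (
	"encoding/json"
	"log"

	"github.com/databricks/cli/bundle/schema"
)

func main() {
	// Confirm the embedded schema is well-formed JSON before serving it.
	var doc map[string]any
	if err := json.Unmarshal(schema.Bytes, &doc); err != nil {
		log.Fatalf("embedded schema is not valid JSON: %v", err)
	}
	log.Printf("schema has %d top-level keys", len(doc))
}
```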
```diff
@@ -0,0 +1,71 @@
+package schema_test
+
+import (
+	"encoding/json"
+	"testing"
+
+	"github.com/databricks/cli/bundle/schema"
+	"github.com/databricks/cli/libs/jsonschema"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func walk(defs map[string]any, p ...string) jsonschema.Schema {
+	v, ok := defs[p[0]]
+	if !ok {
+		panic("not found: " + p[0])
+	}
+
+	if len(p) == 1 {
+		b, err := json.Marshal(v)
+		if err != nil {
+			panic(err)
+		}
+		res := jsonschema.Schema{}
+		err = json.Unmarshal(b, &res)
+		if err != nil {
+			panic(err)
+		}
+		return res
+	}
+
+	return walk(v.(map[string]any), p[1:]...)
+}
+
+func TestJsonSchema(t *testing.T) {
+	s := jsonschema.Schema{}
+	err := json.Unmarshal(schema.Bytes, &s)
+	require.NoError(t, err)
+
+	// Assert job fields have their descriptions loaded.
+	resourceJob := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Job")
+	fields := []string{"name", "continuous", "deployment", "tasks", "trigger"}
+	for _, field := range fields {
+		assert.NotEmpty(t, resourceJob.AnyOf[0].Properties[field].Description)
+	}
+
+	// Assert descriptions were also loaded for a job task definition.
+	jobTask := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.Task")
+	fields = []string{"notebook_task", "spark_jar_task", "spark_python_task", "spark_submit_task", "description", "depends_on", "environment_key", "for_each_task", "existing_cluster_id"}
+	for _, field := range fields {
+		assert.NotEmpty(t, jobTask.AnyOf[0].Properties[field].Description)
+	}
+
+	// Assert descriptions are loaded for pipelines
+	pipeline := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Pipeline")
+	fields = []string{"name", "catalog", "clusters", "channel", "continuous", "deployment", "development"}
+	for _, field := range fields {
+		assert.NotEmpty(t, pipeline.AnyOf[0].Properties[field].Description)
+	}
+
+	// Assert enum values are loaded
+	schedule := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "catalog.MonitorCronSchedule")
+	assert.Contains(t, schedule.AnyOf[0].Properties["pause_status"].Enum, "PAUSED")
+	assert.Contains(t, schedule.AnyOf[0].Properties["pause_status"].Enum, "UNPAUSED")
+
+	providers := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.GitProvider")
+	assert.Contains(t, providers.Enum, "gitHub")
+	assert.Contains(t, providers.Enum, "bitbucketCloud")
+	assert.Contains(t, providers.Enum, "gitHubEnterprise")
+	assert.Contains(t, providers.Enum, "bitbucketServer")
+}
```
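The `walk` helper above descends `s.Definitions`, which the test paths imply is a nested map keyed by package-path segments with a schema node at the leaf. A self-contained toy illustration of that assumed shape:

```go
package main

import "fmt"

func main() {
	// Toy definitions map mirroring the nesting walk expects: one map level
	// per path segment, a schema-like node at the leaf.
	defs := map[string]any{
		"github.com": map[string]any{
			"databricks": map[string]any{
				"cli": map[string]any{
					"example.Type": map[string]any{"type": "object"},
				},
			},
		},
	}

	// Descend the same way walk does, one key at a time.
	node := defs["github.com"].(map[string]any)["databricks"].(map[string]any)["cli"].(map[string]any)["example.Type"]
	fmt.Println(node) // map[type:object]
}
```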
```diff
@@ -3,7 +3,7 @@ package bundle
 import (
 	_ "embed"
 
-	"github.com/databricks/cli/bundle/generated"
+	"github.com/databricks/cli/bundle/schema"
 	"github.com/databricks/cli/cmd/root"
 	"github.com/spf13/cobra"
 )
```
```diff
@@ -16,7 +16,7 @@ func newSchemaCommand() *cobra.Command {
 	}
 
 	cmd.RunE = func(cmd *cobra.Command, args []string) error {
-		_, err := cmd.OutOrStdout().Write(generated.BundleSchema)
+		_, err := cmd.OutOrStdout().Write(schema.Bytes)
 		return err
 	}
 
```
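For context, a hypothetical reconstruction of the full command around the `RunE` shown above; the `Use` and `Short` strings are illustrative, not copied from the CLI:

```go
package bundle

import (
	"github.com/databricks/cli/bundle/schema"
	"github.com/spf13/cobra"
)

func newSchemaCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "schema",                                     // illustrative
		Short: "Print the bundle configuration JSON schema", // illustrative
	}

	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		// Stream the embedded schema to stdout (or the configured writer).
		_, err := cmd.OutOrStdout().Write(schema.Bytes)
		return err
	}

	return cmd
}
```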
```diff
@@ -10,17 +10,19 @@ import (
 	"strings"
 )
 
-// Fields tagged "readonly" should not be emitted in the schema as they are
-// computed at runtime, and should not be assigned a value by the bundle author.
-const readonlyTag = "readonly"
+var skipTags = []string{
+	// Fields tagged "readonly" should not be emitted in the schema as they are
+	// computed at runtime, and should not be assigned a value by the bundle author.
+	"readonly",
 
-// Annotation for internal bundle fields that should not be exposed to customers.
-// Fields can be tagged as "internal" to remove them from the generated schema.
-const internalTag = "internal"
+	// Annotation for internal bundle fields that should not be exposed to customers.
+	// Fields can be tagged as "internal" to remove them from the generated schema.
+	"internal",
 
-// Annotation for bundle fields that have been deprecated.
-// Fields tagged as "deprecated" are omitted from the generated schema.
-const deprecatedTag = "deprecated"
+	// Annotation for bundle fields that have been deprecated.
+	// Fields tagged as "deprecated" are omitted from the generated schema.
+	"deprecated",
+}
 
 type constructor struct {
 	// Map of typ.PkgPath() + "." + typ.Name() to the schema for that type.
```
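To make the tags concrete, a hypothetical configuration struct showing the fields each annotation would exclude (the type and field names are illustrative):

```go
package config

// Hypothetical struct; only the bundle tags matter for schema generation.
type Example struct {
	Name string `json:"name"`                    // emitted in the schema
	ID   string `json:"id" bundle:"readonly"`    // computed at runtime, skipped
	Hash string `json:"hash" bundle:"internal"`  // internal-only, skipped
	Old  string `json:"old" bundle:"deprecated"` // deprecated, skipped
}
```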
```diff
@@ -79,7 +81,7 @@ func (c *constructor) Definitions() map[string]any {
 	return res
 }
 
-// FromType converts a reflect.Type to a jsonschema.Schema. Nodes in the final JSON
+// FromType converts a [reflect.Type] to a [Schema]. Nodes in the final JSON
 // schema are guaranteed to be one level deep, which is done using defining $defs
 // for every Go type and referring them using $ref in the corresponding node in
 // the JSON schema.
```
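The square brackets introduced here are Go doc links (supported since Go 1.19): in rendered documentation, `[reflect.Type]` and `[Schema]` become hyperlinks to those identifiers.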
```diff
@@ -99,8 +101,8 @@ func FromType(typ reflect.Type, fns []func(typ reflect.Type, s Schema) Schema) (
 	}
 
 	for _, fn := range fns {
-		for k, v := range c.definitions {
-			c.definitions[k] = fn(c.seen[k], v)
+		for k := range c.definitions {
+			c.definitions[k] = fn(c.seen[k], c.definitions[k])
 		}
 	}
 
```
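Both forms apply every hook to every accumulated definition; the new form simply reads the value through the map at call time instead of through the loop variable. A sketch of the kind of hook `fns` carries, assuming `FromType` is exported from `libs/jsonschema` as the hunk header and the test's import suggest; the description text and config type are illustrative:

```go
package main

import (
	"fmt"
	"reflect"

	"github.com/databricks/cli/libs/jsonschema"
)

func main() {
	// Hypothetical transform hook matching the element type of fns.
	addNote := func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
		if s.Description == "" {
			s.Description = "Generated from Go type " + typ.String()
		}
		return s
	}

	// Illustrative config type; any struct works.
	type Config struct {
		Name string `json:"name"`
	}

	s, err := jsonschema.FromType(reflect.TypeOf(Config{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{addNote})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", s)
}
```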
```diff
@@ -251,9 +253,14 @@ func (c *constructor) fromTypeStruct(typ reflect.Type) (Schema, error) {
 		bundleTags := strings.Split(structField.Tag.Get("bundle"), ",")
 		// Fields marked as "readonly", "internal" or "deprecated" are skipped
 		// while generating the schema
-		if slices.Contains(bundleTags, readonlyTag) ||
-			slices.Contains(bundleTags, internalTag) ||
-			slices.Contains(bundleTags, deprecatedTag) {
+		skip := false
+		for _, tag := range skipTags {
+			if slices.Contains(bundleTags, tag) {
+				skip = true
+				break
+			}
+		}
+		if skip {
 			continue
 		}
 
```
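The new loop is equivalent to a single `slices.ContainsFunc` call from the standard library (Go 1.21+); a drop-in sketch of that alternative, not the CLI's actual code:

```go
// Equivalent to the skip loop above: true if any bundle tag is in skipTags.
skip := slices.ContainsFunc(skipTags, func(tag string) bool {
	return slices.Contains(bundleTags, tag)
})
if skip {
	continue
}
```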