Compare commits


No commits in common. "d07192fe039e7b73c26508d35c85a4871c73587e" and "535f6708685a8f0246c738ea0eee4e9c18550084" have entirely different histories.

35 changed files with 8229 additions and 1263 deletions


@@ -1 +1 @@
-3eae49b444cac5a0118a3503e5b7ecef7f96527a
+f98c07f9c71f579de65d2587bb0292f83d10e55d

.gitattributes (vendored, 3 changes)

@@ -75,8 +75,6 @@ cmd/workspace/online-tables/online-tables.go linguist-generated=true
 cmd/workspace/permission-migration/permission-migration.go linguist-generated=true
 cmd/workspace/permissions/permissions.go linguist-generated=true
 cmd/workspace/pipelines/pipelines.go linguist-generated=true
-cmd/workspace/policy-compliance-for-clusters/policy-compliance-for-clusters.go linguist-generated=true
-cmd/workspace/policy-compliance-for-jobs/policy-compliance-for-jobs.go linguist-generated=true
 cmd/workspace/policy-families/policy-families.go linguist-generated=true
 cmd/workspace/provider-exchange-filters/provider-exchange-filters.go linguist-generated=true
 cmd/workspace/provider-exchanges/provider-exchanges.go linguist-generated=true
@@ -96,7 +94,6 @@ cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true
 cmd/workspace/recipients/recipients.go linguist-generated=true
 cmd/workspace/registered-models/registered-models.go linguist-generated=true
 cmd/workspace/repos/repos.go linguist-generated=true
-cmd/workspace/resource-quotas/resource-quotas.go linguist-generated=true
 cmd/workspace/restrict-workspace-admins/restrict-workspace-admins.go linguist-generated=true
 cmd/workspace/schemas/schemas.go linguist-generated=true
 cmd/workspace/secrets/secrets.go linguist-generated=true


@@ -40,10 +40,6 @@ func (v *verifyCliVersion) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	}

 	if !c.Check(version) {
-		if version.Prerelease() == "dev" && version.Major() == 0 {
-			return diag.Warningf("Ignoring Databricks CLI version constraint for development build. Required: %s, current: %s", constraint, currentVersion)
-		}
-
 		return diag.Errorf("Databricks CLI version constraint not satisfied. Required: %s, current: %s", constraint, currentVersion)
 	}


@@ -107,11 +107,6 @@ func TestVerifyCliVersion(t *testing.T) {
 			constraint:    "^0.100",
 			expectedError: "invalid version constraint \"^0.100\" specified. Please specify the version constraint in the format (>=) 0.0.0(, <= 1.0.0)",
 		},
-		{
-			currentVersion: "0.0.0-dev+06b169284737",
-			constraint:     ">= 0.100.0",
-			expectedError:  "Ignoring Databricks CLI version constraint for development build. Required: >= 0.100.0",
-		},
 	}

 	t.Cleanup(func() {
@@ -135,7 +130,7 @@ func TestVerifyCliVersion(t *testing.T) {
 			diags := bundle.Apply(context.Background(), b, VerifyCliVersion())
 			if tc.expectedError != "" {
 				require.NotEmpty(t, diags)
-				require.Contains(t, diags[0].Summary, tc.expectedError)
+				require.Equal(t, tc.expectedError, diags.Error().Error())
 			} else {
 				require.Empty(t, diags)
 			}


@@ -0,0 +1,42 @@
package main
import (
"encoding/json"
"fmt"
"log"
"os"
"github.com/databricks/cli/bundle/schema"
)
func main() {
if len(os.Args) != 2 {
fmt.Println("Usage: go run main.go <output-file>")
os.Exit(1)
}
// Output file, to write the generated schema descriptions to.
outputFile := os.Args[1]
// Input file, the databricks openapi spec.
inputFile := os.Getenv("DATABRICKS_OPENAPI_SPEC")
if inputFile == "" {
log.Fatal("DATABRICKS_OPENAPI_SPEC environment variable not set")
}
// Generate the schema descriptions.
docs, err := schema.UpdateBundleDescriptions(inputFile)
if err != nil {
log.Fatal(err)
}
result, err := json.MarshalIndent(docs, "", " ")
if err != nil {
log.Fatal(err)
}
// Write the schema descriptions to the output file.
err = os.WriteFile(outputFile, result, 0644)
if err != nil {
log.Fatal(err)
}
}


@@ -1,97 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"log"
"os"
"reflect"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/jsonschema"
)
func interpolationPattern(s string) string {
return fmt.Sprintf(`\$\{(%s(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`, s)
}
func addInterpolationPatterns(_ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
switch s.Type {
case jsonschema.ArrayType, jsonschema.ObjectType:
// arrays and objects can have complex variable values specified.
return jsonschema.Schema{
AnyOf: []jsonschema.Schema{s, {
Type: jsonschema.StringType,
// TODO: Are multi-level complex variable references supported?
Pattern: interpolationPattern("var"),
}},
}
case jsonschema.StringType, jsonschema.IntegerType, jsonschema.NumberType, jsonschema.BooleanType:
// primitives can have variable values, or references like ${bundle.xyz}
// or ${workspace.xyz}
// TODO: Followup, do not allow references like ${} in the schema unless
// they are of the permitted patterns?
return jsonschema.Schema{
AnyOf: []jsonschema.Schema{s,
// TODO: Add "resources" here
{Type: jsonschema.StringType, Pattern: interpolationPattern("bundle")},
{Type: jsonschema.StringType, Pattern: interpolationPattern("workspace")},
{Type: jsonschema.StringType, Pattern: interpolationPattern("var")},
},
}
default:
return s
}
}
// TODO: Add a couple of end to end tests that the bundle schema generated is
// correct.
// TODO: Call out in the PR description that recursive types like "for_each_task"
// are now supported. Manually test for_each_task.
// TODO: The bundle_descriptions.json file contains a bunch of custom descriptions
// as well. Make sure to pull those in.
// TODO: Add unit tests for all permutations of structs, maps and slices for the FromType
// method.
// TODO: Note the minor regression of losing the bundle descriptions
func main() {
if len(os.Args) != 2 {
fmt.Println("Usage: go run main.go <output-file>")
os.Exit(1)
}
// Output file, where the generated JSON schema will be written to.
outputFile := os.Args[1]
// Input file, the databricks openapi spec.
inputFile := os.Getenv("DATABRICKS_OPENAPI_SPEC")
if inputFile == "" {
log.Fatal("DATABRICKS_OPENAPI_SPEC environment variable not set")
}
p, err := newParser(inputFile)
if err != nil {
log.Fatal(err)
}
// Generate the JSON schema from the bundle Go struct.
s, err := jsonschema.FromType(reflect.TypeOf(config.Root{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
p.addDescriptions,
p.addEnums,
addInterpolationPatterns,
})
if err != nil {
log.Fatal(err)
}
b, err := json.MarshalIndent(s, "", " ")
if err != nil {
log.Fatal(err)
}
// Write the schema descriptions to the output file.
err = os.WriteFile(outputFile, b, 0644)
if err != nil {
log.Fatal(err)
}
}
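As an aside on the addInterpolationPatterns helper being deleted above: it wraps primitive schema nodes in an anyOf that also accepts ${...} variable references. A standalone, hypothetical Go sketch (not part of this change) showing which strings the generated pattern accepts:

package main

import (
	"fmt"
	"regexp"
)

// Mirrors the interpolationPattern helper above: builds a regex that matches
// ${...} references rooted at the given prefix (e.g. "var").
func interpolationPattern(s string) string {
	return fmt.Sprintf(`\$\{(%s(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`, s)
}

func main() {
	re := regexp.MustCompile(interpolationPattern("var"))
	for _, s := range []string{
		"${var.catalog}",         // simple variable reference: matches
		"${var.clusters[0].id}",  // nested lookup with an index: matches
		"${workspace.root_path}", // different prefix: no match for "var"
		"plain string",           // no interpolation: no match
	} {
		fmt.Printf("%-28s -> %v\n", s, re.MatchString(s))
	}
}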


@@ -1,131 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"os"
"path"
"reflect"
"strings"
"github.com/databricks/cli/libs/jsonschema"
)
type Components struct {
Schemas map[string]jsonschema.Schema `json:"schemas,omitempty"`
}
type Specification struct {
Components Components `json:"components"`
}
type openapiParser struct {
ref map[string]jsonschema.Schema
}
func newParser(path string) (*openapiParser, error) {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
spec := Specification{}
err = json.Unmarshal(b, &spec)
if err != nil {
return nil, err
}
p := &openapiParser{}
p.ref = spec.Components.Schemas
return p, nil
}
// This function finds any JSON schemas that were defined in the OpenAPI spec
// that correspond to the given Go SDK type. It looks both at the type itself
// and any embedded types within it.
func (p *openapiParser) findRef(typ reflect.Type) (jsonschema.Schema, bool) {
typs := []reflect.Type{typ}
// If the type is a struct, the corresponding Go SDK struct might be embedded
// in it. We need to check for those as well.
if typ.Kind() == reflect.Struct {
for i := 0; i < typ.NumField(); i++ {
if !typ.Field(i).Anonymous {
continue
}
// Deference current type if it's a pointer.
ctyp := typ.Field(i).Type
for ctyp.Kind() == reflect.Ptr {
ctyp = ctyp.Elem()
}
typs = append(typs, ctyp)
}
}
for _, ctyp := range typs {
// Skip if it's not a Go SDK type.
if !strings.HasPrefix(ctyp.PkgPath(), "github.com/databricks/databricks-sdk-go") {
continue
}
pkgName := path.Base(ctyp.PkgPath())
k := fmt.Sprintf("%s.%s", pkgName, ctyp.Name())
// Skip if the type is not in the openapi spec.
_, ok := p.ref[k]
if !ok {
continue
}
// Return the first Go SDK type found in the openapi spec.
return p.ref[k], true
}
return jsonschema.Schema{}, false
}
// Use the OpenAPI spec to load descriptions for the given type.
func (p *openapiParser) addDescriptions(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
ref, ok := p.findRef(typ)
if !ok {
return s
}
s.Description = ref.Description
// Iterate over properties to load descriptions. This is not needed for any
// OpenAPI spec generated from protobufs, which are guaranteed to be one level
// deep.
// Needed for any hand-written OpenAPI specs.
for k, v := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
v.Description = refProp.Description
}
}
return s
}
// Use the OpenAPI spec add enum values for the given type.
func (p *openapiParser) addEnums(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
ref, ok := p.findRef(typ)
if !ok {
return s
}
s.Enum = append(s.Enum, ref.Enum...)
// Iterate over properties to load enums. This is not needed for any
// OpenAPI spec generated from protobufs, which are guaranteed to be one level
// deep.
// Needed for any hand-written OpenAPI specs.
for k, v := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
v.Enum = append(v.Enum, refProp.Enum...)
}
}
return s
}


@@ -1,24 +1,19 @@
 package libraries

-import (
-	"fmt"
-
-	"github.com/databricks/databricks-sdk-go/service/compute"
-)
+import "github.com/databricks/databricks-sdk-go/service/compute"

-func libraryPath(library *compute.Library) (string, error) {
+func libraryPath(library *compute.Library) string {
 	if library.Whl != "" {
-		return library.Whl, nil
+		return library.Whl
 	}
 	if library.Jar != "" {
-		return library.Jar, nil
+		return library.Jar
 	}
 	if library.Egg != "" {
-		return library.Egg, nil
+		return library.Egg
 	}
 	if library.Requirements != "" {
-		return library.Requirements, nil
+		return library.Requirements
 	}
-
-	return "", fmt.Errorf("not supported library type")
+	return ""
 }


@@ -10,27 +10,9 @@ import (
 func TestLibraryPath(t *testing.T) {
 	path := "/some/path"

-	p, err := libraryPath(&compute.Library{Whl: path})
-	assert.Equal(t, path, p)
-	assert.Nil(t, err)
-
-	p, err = libraryPath(&compute.Library{Jar: path})
-	assert.Equal(t, path, p)
-	assert.Nil(t, err)
-
-	p, err = libraryPath(&compute.Library{Egg: path})
-	assert.Equal(t, path, p)
-	assert.Nil(t, err)
-
-	p, err = libraryPath(&compute.Library{Requirements: path})
-	assert.Equal(t, path, p)
-	assert.Nil(t, err)
-
-	p, err = libraryPath(&compute.Library{})
-	assert.Equal(t, "", p)
-	assert.NotNil(t, err)
-
-	p, err = libraryPath(&compute.Library{Pypi: &compute.PythonPyPiLibrary{Package: "pypipackage"}})
-	assert.Equal(t, "", p)
-	assert.NotNil(t, err)
+	assert.Equal(t, path, libraryPath(&compute.Library{Whl: path}))
+	assert.Equal(t, path, libraryPath(&compute.Library{Jar: path}))
+	assert.Equal(t, path, libraryPath(&compute.Library{Egg: path}))
+	assert.Equal(t, path, libraryPath(&compute.Library{Requirements: path}))
+	assert.Equal(t, "", libraryPath(&compute.Library{}))
 }


@@ -67,12 +67,7 @@ func FindTasksWithLocalLibraries(b *bundle.Bundle) []jobs.Task {
 func isTaskWithLocalLibraries(task jobs.Task) bool {
 	for _, l := range task.Libraries {
-		p, err := libraryPath(&l)
-		// If there's an error, skip the library because it's not of supported type
-		if err != nil {
-			continue
-		}
-
-		if IsLibraryLocal(p) {
+		if IsLibraryLocal(libraryPath(&l)) {
 			return true
 		}
 	}


@@ -43,10 +43,6 @@ func IsLocalPath(p string) bool {
 // We can't use IsLocalPath beacuse environment dependencies can be
 // a pypi package name which can be misinterpreted as a local path by IsLocalPath.
 func IsLibraryLocal(dep string) bool {
-	if dep == "" {
-		return false
-	}
-
 	possiblePrefixes := []string{
 		".",
 	}


@@ -48,7 +48,6 @@ func TestIsLibraryLocal(t *testing.T) {
 		{path: "../../local/*.whl", expected: true},
 		{path: "..\\..\\local\\*.whl", expected: true},
 		{path: "file://path/to/package/whl.whl", expected: true},
-		{path: "", expected: false},
 		{path: "pypipackage", expected: false},
 		{path: "/Volumes/catalog/schema/volume/path.whl", expected: false},
 		{path: "/Workspace/my_project/dist.whl", expected: false},


@@ -29,8 +29,8 @@ func IsWorkspacePath(path string) bool {
 // IsWorkspaceLibrary returns true if the specified library refers to a workspace path.
 func IsWorkspaceLibrary(library *compute.Library) bool {
-	path, err := libraryPath(library)
-	if err != nil {
+	path := libraryPath(library)
+	if path == "" {
 		return false
 	}


@@ -223,17 +223,6 @@ func TestNoIncompatibleWheelTasks(t *testing.T) {
 						{Whl: "./dist/test.whl"},
 					},
 				},
-				{
-					TaskKey:           "key7",
-					PythonWheelTask:   &jobs.PythonWheelTask{},
-					ExistingClusterId: "test-key-2",
-					Libraries: []compute.Library{
-						{Whl: "signol_lib-0.4.4-20240822+prod-py3-none-any.whl"},
-						{Pypi: &compute.PythonPyPiLibrary{
-							Package: "requests==2.25.1",
-						}},
-					},
-				},
 			},
 		},
 	},
@@ -252,46 +241,6 @@ func TestNoIncompatibleWheelTasks(t *testing.T) {
 	require.False(t, hasIncompatibleWheelTasks(context.Background(), b))
 }

-func TestTasksWithPyPiPackageAreCompatible(t *testing.T) {
-	b := &bundle.Bundle{
-		Config: config.Root{
-			Resources: config.Resources{
-				Jobs: map[string]*resources.Job{
-					"job1": {
-						JobSettings: &jobs.JobSettings{
-							JobClusters: []jobs.JobCluster{
-								{
-									JobClusterKey: "cluster1",
-									NewCluster: compute.ClusterSpec{
-										SparkVersion: "12.2.x-scala2.12",
-									},
-								},
-							},
-							Tasks: []jobs.Task{
-								{
-									TaskKey:           "key1",
-									PythonWheelTask:   &jobs.PythonWheelTask{},
-									ExistingClusterId: "test-key-2",
-									Libraries: []compute.Library{
-										{Pypi: &compute.PythonPyPiLibrary{
-											Package: "requests==2.25.1",
-										}},
-									},
-								},
-							},
-						},
-					},
-				},
-			},
-		},
-	}
-
-	m := mocks.NewMockWorkspaceClient(t)
-	b.SetWorkpaceClient(m.WorkspaceClient)
-
-	require.False(t, hasIncompatibleWheelTasks(context.Background(), b))
-}

 func TestNoWarningWhenPythonWheelWrapperIsOn(t *testing.T) {
 	b := &bundle.Bundle{
 		Config: config.Root{

bundle/schema/README.md (new file, 18 lines)

@@ -0,0 +1,18 @@
### Overview
`docs/bundle_descriptions.json` contains both autogenerated and manually written
descriptions for the JSON schema. Specifically:
1. `resources` : almost all descriptions are autogenerated from the OpenAPI spec
2. `targets` : almost all descriptions are copied over from root-level entities (eg: `bundle`, `artifacts`)
3. `bundle` : manually edited
4. `include` : manually edited
5. `workspace` : manually edited
6. `artifacts` : manually edited
These descriptions are rendered as inline documentation in an IDE.
### SOP: Add schema descriptions for new fields in bundle config
Manually edit bundle_descriptions.json to add your descriptions. Note that the
descriptions in the `resources` block are generated from the OpenAPI spec, and thus
any changes there will be overwritten.
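The descriptions file itself is suppressed later in this diff because of its size, so here is a rough, hypothetical illustration of its shape: a minimal Go sketch (not part of this change) that builds a tiny Docs value, the struct defined in bundle/schema/docs.go below, and prints it with the same JSON field names the file uses. The descriptions are placeholders, not the real contents.

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/databricks/cli/bundle/schema"
)

func main() {
	// Placeholder descriptions only; the real docs/bundle_descriptions.json is
	// generated and hand-maintained as the README above describes.
	docs := &schema.Docs{
		Description: "Root of the bundle configuration",
		Properties: map[string]*schema.Docs{
			"bundle":    {Description: "manually edited description"},
			"resources": {Description: "autogenerated from the OpenAPI spec"},
		},
	}

	out, err := json.MarshalIndent(docs, "", "  ")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}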

bundle/schema/docs.go (new file, 109 lines)

@@ -0,0 +1,109 @@
package schema
import (
_ "embed"
"encoding/json"
"fmt"
"os"
"reflect"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/jsonschema"
)
// A subset of Schema struct
type Docs struct {
Description string `json:"description"`
Properties map[string]*Docs `json:"properties,omitempty"`
Items *Docs `json:"items,omitempty"`
AdditionalProperties *Docs `json:"additionalproperties,omitempty"`
}
//go:embed docs/bundle_descriptions.json
var bundleDocs []byte
func (docs *Docs) refreshTargetsDocs() error {
targetsDocs, ok := docs.Properties["targets"]
if !ok || targetsDocs.AdditionalProperties == nil ||
targetsDocs.AdditionalProperties.Properties == nil {
return fmt.Errorf("invalid targets descriptions")
}
targetProperties := targetsDocs.AdditionalProperties.Properties
propertiesToCopy := []string{"artifacts", "bundle", "resources", "workspace"}
for _, p := range propertiesToCopy {
targetProperties[p] = docs.Properties[p]
}
return nil
}
func LoadBundleDescriptions() (*Docs, error) {
embedded := Docs{}
err := json.Unmarshal(bundleDocs, &embedded)
return &embedded, err
}
func UpdateBundleDescriptions(openapiSpecPath string) (*Docs, error) {
embedded, err := LoadBundleDescriptions()
if err != nil {
return nil, err
}
// Generate schema from the embedded descriptions, and convert it back to docs.
// This creates empty descriptions for any properties that were missing in the
// embedded descriptions.
schema, err := New(reflect.TypeOf(config.Root{}), embedded)
if err != nil {
return nil, err
}
docs := schemaToDocs(schema)
// Load the Databricks OpenAPI spec
openapiSpec, err := os.ReadFile(openapiSpecPath)
if err != nil {
return nil, err
}
spec := &Specification{}
err = json.Unmarshal(openapiSpec, spec)
if err != nil {
return nil, err
}
openapiReader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
// Generate descriptions for the "resources" field
resourcesDocs, err := openapiReader.ResourcesDocs()
if err != nil {
return nil, err
}
resourceSchema, err := New(reflect.TypeOf(config.Resources{}), resourcesDocs)
if err != nil {
return nil, err
}
docs.Properties["resources"] = schemaToDocs(resourceSchema)
docs.refreshTargetsDocs()
return docs, nil
}
// *Docs are a subset of *Schema, this function selects that subset
func schemaToDocs(jsonSchema *jsonschema.Schema) *Docs {
// terminate recursion if schema is nil
if jsonSchema == nil {
return nil
}
docs := &Docs{
Description: jsonSchema.Description,
}
if len(jsonSchema.Properties) > 0 {
docs.Properties = make(map[string]*Docs)
}
for k, v := range jsonSchema.Properties {
docs.Properties[k] = schemaToDocs(v)
}
docs.Items = schemaToDocs(jsonSchema.Items)
if additionalProperties, ok := jsonSchema.AdditionalProperties.(*jsonschema.Schema); ok {
docs.AdditionalProperties = schemaToDocs(additionalProperties)
}
return docs
}
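Because the descriptions are embedded with go:embed, callers can read them without any external files. A minimal hypothetical sketch (not part of this change) using the exported LoadBundleDescriptions helper defined above:

package main

import (
	"fmt"
	"log"

	"github.com/databricks/cli/bundle/schema"
)

func main() {
	// Load the descriptions embedded from docs/bundle_descriptions.json.
	docs, err := schema.LoadBundleDescriptions()
	if err != nil {
		log.Fatal(err)
	}

	// Print the top-level description for the "bundle" block, if present.
	if b, ok := docs.Properties["bundle"]; ok {
		fmt.Println(b.Description)
	}
}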

bundle/schema/docs/bundle_descriptions.json (generated, 6371 lines)

File diff suppressed because it is too large.


@@ -0,0 +1,62 @@
package schema
import (
"encoding/json"
"testing"
"github.com/databricks/cli/libs/jsonschema"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestSchemaToDocs(t *testing.T) {
jsonSchema := &jsonschema.Schema{
Type: "object",
Description: "root doc",
Properties: map[string]*jsonschema.Schema{
"foo": {Type: "number", Description: "foo doc"},
"bar": {Type: "string"},
"octave": {
Type: "object",
AdditionalProperties: &jsonschema.Schema{Type: "number"},
Description: "octave docs",
},
"scales": {
Type: "object",
Description: "scale docs",
Items: &jsonschema.Schema{Type: "string"},
},
},
}
docs := schemaToDocs(jsonSchema)
docsJson, err := json.MarshalIndent(docs, " ", " ")
require.NoError(t, err)
expected :=
`{
"description": "root doc",
"properties": {
"bar": {
"description": ""
},
"foo": {
"description": "foo doc"
},
"octave": {
"description": "octave docs",
"additionalproperties": {
"description": ""
}
},
"scales": {
"description": "scale docs",
"items": {
"description": ""
}
}
}
}`
t.Log("[DEBUG] actual: ", string(docsJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(docsJson))
}

bundle/schema/openapi.go (new file, 293 lines)

@@ -0,0 +1,293 @@
package schema
import (
"encoding/json"
"fmt"
"strings"
"github.com/databricks/cli/libs/jsonschema"
)
type OpenapiReader struct {
// OpenAPI spec to read schemas from.
OpenapiSpec *Specification
// In-memory cache of schemas read from the OpenAPI spec.
memo map[string]jsonschema.Schema
}
const SchemaPathPrefix = "#/components/schemas/"
// Read a schema directly from the OpenAPI spec.
func (reader *OpenapiReader) readOpenapiSchema(path string) (jsonschema.Schema, error) {
schemaKey := strings.TrimPrefix(path, SchemaPathPrefix)
// return early if we already have a computed schema
memoSchema, ok := reader.memo[schemaKey]
if ok {
return memoSchema, nil
}
// check path is present in openapi spec
openapiSchema, ok := reader.OpenapiSpec.Components.Schemas[schemaKey]
if !ok {
return jsonschema.Schema{}, fmt.Errorf("schema with path %s not found in openapi spec", path)
}
// convert openapi schema to the native schema struct
bytes, err := json.Marshal(*openapiSchema)
if err != nil {
return jsonschema.Schema{}, err
}
jsonSchema := jsonschema.Schema{}
err = json.Unmarshal(bytes, &jsonSchema)
if err != nil {
return jsonschema.Schema{}, err
}
// A hack to convert a map[string]interface{} to *Schema
// We rely on the type of a AdditionalProperties in downstream functions
// to do reference interpolation
_, ok = jsonSchema.AdditionalProperties.(map[string]interface{})
if ok {
b, err := json.Marshal(jsonSchema.AdditionalProperties)
if err != nil {
return jsonschema.Schema{}, err
}
additionalProperties := &jsonschema.Schema{}
err = json.Unmarshal(b, additionalProperties)
if err != nil {
return jsonschema.Schema{}, err
}
jsonSchema.AdditionalProperties = additionalProperties
}
// store read schema into memo
reader.memo[schemaKey] = jsonSchema
return jsonSchema, nil
}
// Resolve all nested "$ref" references in the schema. This function unrolls a single
// level of "$ref" in the schema and calls into traverseSchema to resolve nested references.
// Thus this function and traverseSchema are mutually recursive.
//
// This function is safe against reference loops. If a reference loop is detected, an error
// is returned.
func (reader *OpenapiReader) safeResolveRefs(root *jsonschema.Schema, tracker *tracker) (*jsonschema.Schema, error) {
if root.Reference == nil {
return reader.traverseSchema(root, tracker)
}
key := *root.Reference
// HACK to unblock CLI release (13th Feb 2024). This is temporary until proper
// support for recursive types is added to the docs generator. PR: https://github.com/databricks/cli/pull/1204
if strings.Contains(key, "ForEachTask") {
return root, nil
}
if tracker.hasCycle(key) {
// self reference loops can be supported however the logic is non-trivial because
// cross refernce loops are not allowed (see: http://json-schema.org/understanding-json-schema/structuring.html#recursion)
return nil, fmt.Errorf("references loop detected")
}
ref := *root.Reference
description := root.Description
tracker.push(ref, ref)
// Mark reference nil, so we do not traverse this again. This is tracked
// in the memo
root.Reference = nil
// unroll one level of reference.
selfRef, err := reader.readOpenapiSchema(ref)
if err != nil {
return nil, err
}
root = &selfRef
root.Description = description
// traverse again to find new references
root, err = reader.traverseSchema(root, tracker)
if err != nil {
return nil, err
}
tracker.pop(ref)
return root, err
}
// Traverse the nested properties of the schema to resolve "$ref" references. This function
// and safeResolveRefs are mutually recursive.
func (reader *OpenapiReader) traverseSchema(root *jsonschema.Schema, tracker *tracker) (*jsonschema.Schema, error) {
// case primitive (or invalid)
if root.Type != jsonschema.ObjectType && root.Type != jsonschema.ArrayType {
return root, nil
}
// only root references are resolved
if root.Reference != nil {
return reader.safeResolveRefs(root, tracker)
}
// case struct
if len(root.Properties) > 0 {
for k, v := range root.Properties {
childSchema, err := reader.safeResolveRefs(v, tracker)
if err != nil {
return nil, err
}
root.Properties[k] = childSchema
}
}
// case array
if root.Items != nil {
itemsSchema, err := reader.safeResolveRefs(root.Items, tracker)
if err != nil {
return nil, err
}
root.Items = itemsSchema
}
// case map
additionalProperties, ok := root.AdditionalProperties.(*jsonschema.Schema)
if ok && additionalProperties != nil {
valueSchema, err := reader.safeResolveRefs(additionalProperties, tracker)
if err != nil {
return nil, err
}
root.AdditionalProperties = valueSchema
}
return root, nil
}
func (reader *OpenapiReader) readResolvedSchema(path string) (*jsonschema.Schema, error) {
root, err := reader.readOpenapiSchema(path)
if err != nil {
return nil, err
}
tracker := newTracker()
tracker.push(path, path)
resolvedRoot, err := reader.safeResolveRefs(&root, tracker)
if err != nil {
return nil, tracker.errWithTrace(err.Error(), "")
}
return resolvedRoot, nil
}
func (reader *OpenapiReader) jobsDocs() (*Docs, error) {
jobSettingsSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "jobs.JobSettings")
if err != nil {
return nil, err
}
jobDocs := schemaToDocs(jobSettingsSchema)
// TODO: add description for id if needed.
// Tracked in https://github.com/databricks/cli/issues/242
jobsDocs := &Docs{
Description: "List of Databricks jobs",
AdditionalProperties: jobDocs,
}
return jobsDocs, nil
}
func (reader *OpenapiReader) pipelinesDocs() (*Docs, error) {
pipelineSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "pipelines.PipelineSpec")
if err != nil {
return nil, err
}
pipelineDocs := schemaToDocs(pipelineSpecSchema)
// TODO: Two fields in resources.Pipeline have the json tag id. Clarify the
// semantics and then add a description if needed. (https://github.com/databricks/cli/issues/242)
pipelinesDocs := &Docs{
Description: "List of DLT pipelines",
AdditionalProperties: pipelineDocs,
}
return pipelinesDocs, nil
}
func (reader *OpenapiReader) experimentsDocs() (*Docs, error) {
experimentSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "ml.Experiment")
if err != nil {
return nil, err
}
experimentDocs := schemaToDocs(experimentSpecSchema)
experimentsDocs := &Docs{
Description: "List of MLflow experiments",
AdditionalProperties: experimentDocs,
}
return experimentsDocs, nil
}
func (reader *OpenapiReader) modelsDocs() (*Docs, error) {
modelSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "ml.Model")
if err != nil {
return nil, err
}
modelDocs := schemaToDocs(modelSpecSchema)
modelsDocs := &Docs{
Description: "List of MLflow models",
AdditionalProperties: modelDocs,
}
return modelsDocs, nil
}
func (reader *OpenapiReader) modelServingEndpointsDocs() (*Docs, error) {
modelServingEndpointsSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "serving.CreateServingEndpoint")
if err != nil {
return nil, err
}
modelServingEndpointsDocs := schemaToDocs(modelServingEndpointsSpecSchema)
modelServingEndpointsAllDocs := &Docs{
Description: "List of Model Serving Endpoints",
AdditionalProperties: modelServingEndpointsDocs,
}
return modelServingEndpointsAllDocs, nil
}
func (reader *OpenapiReader) registeredModelDocs() (*Docs, error) {
registeredModelsSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "catalog.CreateRegisteredModelRequest")
if err != nil {
return nil, err
}
registeredModelsDocs := schemaToDocs(registeredModelsSpecSchema)
registeredModelsAllDocs := &Docs{
Description: "List of Registered Models",
AdditionalProperties: registeredModelsDocs,
}
return registeredModelsAllDocs, nil
}
func (reader *OpenapiReader) ResourcesDocs() (*Docs, error) {
jobsDocs, err := reader.jobsDocs()
if err != nil {
return nil, err
}
pipelinesDocs, err := reader.pipelinesDocs()
if err != nil {
return nil, err
}
experimentsDocs, err := reader.experimentsDocs()
if err != nil {
return nil, err
}
modelsDocs, err := reader.modelsDocs()
if err != nil {
return nil, err
}
modelServingEndpointsDocs, err := reader.modelServingEndpointsDocs()
if err != nil {
return nil, err
}
registeredModelsDocs, err := reader.registeredModelDocs()
if err != nil {
return nil, err
}
return &Docs{
Description: "Collection of Databricks resources to deploy.",
Properties: map[string]*Docs{
"jobs": jobsDocs,
"pipelines": pipelinesDocs,
"experiments": experimentsDocs,
"models": modelsDocs,
"model_serving_endpoints": modelServingEndpointsDocs,
"registered_models": registeredModelsDocs,
},
}, nil
}


@@ -0,0 +1,493 @@
package schema
import (
"encoding/json"
"testing"
"github.com/databricks/cli/libs/jsonschema"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestReadSchemaForObject(t *testing.T) {
specString := `
{
"components": {
"schemas": {
"foo": {
"type": "number"
},
"fruits": {
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mango": {
"type": "object",
"description": "a mango for my schema",
"$ref": "#/components/schemas/mango"
}
}
},
"mango": {
"type": "object",
"properties": {
"foo": {
"$ref": "#/components/schemas/foo"
}
}
}
}
}
}
`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mango": {
"type": "object",
"description": "a mango for my schema",
"properties": {
"foo": {
"type": "number"
}
}
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestReadSchemaForArray(t *testing.T) {
specString := `
{
"components": {
"schemas": {
"fruits": {
"type": "object",
"description": "fruits that are cool",
"items": {
"description": "some papayas, because papayas are fruits too",
"$ref": "#/components/schemas/papaya"
}
},
"papaya": {
"type": "number"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are cool",
"items": {
"type": "number",
"description": "some papayas, because papayas are fruits too"
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestReadSchemaForMap(t *testing.T) {
specString := `{
"components": {
"schemas": {
"fruits": {
"type": "object",
"description": "fruits that are meh",
"additionalProperties": {
"description": "watermelons. watermelons.",
"$ref": "#/components/schemas/watermelon"
}
},
"watermelon": {
"type": "number"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are meh",
"additionalProperties": {
"type": "number",
"description": "watermelons. watermelons."
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestRootReferenceIsResolved(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "object",
"description": "this description is ignored",
"properties": {
"abc": {
"type": "string"
}
}
},
"fruits": {
"type": "object",
"description": "foo fighters fighting fruits",
"$ref": "#/components/schemas/foo"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
schema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(schema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "foo fighters fighting fruits",
"properties": {
"abc": {
"type": "string"
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestSelfReferenceLoopErrors(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "object",
"description": "this description is ignored",
"properties": {
"bar": {
"type": "object",
"$ref": "#/components/schemas/foo"
}
}
},
"fruits": {
"type": "object",
"description": "foo fighters fighting fruits",
"$ref": "#/components/schemas/foo"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
_, err = reader.readResolvedSchema("#/components/schemas/fruits")
assert.ErrorContains(t, err, "references loop detected. traversal trace: -> #/components/schemas/fruits -> #/components/schemas/foo")
}
func TestCrossReferenceLoopErrors(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "object",
"description": "this description is ignored",
"properties": {
"bar": {
"type": "object",
"$ref": "#/components/schemas/fruits"
}
}
},
"fruits": {
"type": "object",
"description": "foo fighters fighting fruits",
"$ref": "#/components/schemas/foo"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
_, err = reader.readResolvedSchema("#/components/schemas/fruits")
assert.ErrorContains(t, err, "references loop detected. traversal trace: -> #/components/schemas/fruits -> #/components/schemas/foo")
}
func TestReferenceResolutionForMapInObject(t *testing.T) {
specString := `
{
"components": {
"schemas": {
"foo": {
"type": "number"
},
"fruits": {
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mangos": {
"type": "object",
"description": "multiple mangos",
"$ref": "#/components/schemas/mango"
}
}
},
"mango": {
"type": "object",
"additionalProperties": {
"description": "a single mango",
"$ref": "#/components/schemas/foo"
}
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mangos": {
"type": "object",
"description": "multiple mangos",
"additionalProperties": {
"type": "number",
"description": "a single mango"
}
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestReferenceResolutionForArrayInObject(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "number"
},
"fruits": {
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mangos": {
"type": "object",
"description": "multiple mangos",
"$ref": "#/components/schemas/mango"
}
}
},
"mango": {
"type": "object",
"items": {
"description": "a single mango",
"$ref": "#/components/schemas/foo"
}
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mangos": {
"type": "object",
"description": "multiple mangos",
"items": {
"type": "number",
"description": "a single mango"
}
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestReferenceResolutionDoesNotOverwriteDescriptions(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "number"
},
"fruits": {
"type": "object",
"properties": {
"guava": {
"type": "object",
"description": "Guava is a fruit",
"$ref": "#/components/schemas/foo"
},
"mango": {
"type": "object",
"description": "What is a mango?",
"$ref": "#/components/schemas/foo"
}
}
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"properties": {
"guava": {
"type": "number",
"description": "Guava is a fruit"
},
"mango": {
"type": "number",
"description": "What is a mango?"
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}

bundle/schema/schema.go (new file, 306 lines)

@@ -0,0 +1,306 @@
package schema
import (
"reflect"
"github.com/databricks/cli/libs/dyn/dynvar"
"github.com/databricks/cli/libs/jsonschema"
)
// // Fields tagged "readonly" should not be emitted in the schema as they are
// // computed at runtime, and should not be assigned a value by the bundle author.
// const readonlyTag = "readonly"
// // Annotation for internal bundle fields that should not be exposed to customers.
// // Fields can be tagged as "internal" to remove them from the generated schema.
// const internalTag = "internal"
// // Annotation for bundle fields that have been deprecated.
// // Fields tagged as "deprecated" are removed/omitted from the generated schema.
// const deprecatedTag = "deprecated"
// This function translates golang types into json schema. Here is the mapping
// between json schema types and golang types
//
// - GolangType -> Javascript type / Json Schema2
//
// - bool -> boolean
//
// - string -> string
//
// - int (all variants) -> number
//
// - float (all variants) -> number
//
// - map[string]MyStruct -> { type: object, additionalProperties: {}}
// for details visit: https://json-schema.org/understanding-json-schema/reference/object.html#additional-properties
//
// - []MyStruct -> {type: array, items: {}}
// for details visit: https://json-schema.org/understanding-json-schema/reference/array.html#items
//
// - []MyStruct -> {type: object, properties: {}, additionalProperties: false}
// for details visit: https://json-schema.org/understanding-json-schema/reference/object.html#properties
func New(golangType reflect.Type, docs *Docs) (*jsonschema.Schema, error) {
s, err := jsonschema.FromType(golangType, func(s jsonschema.Schema) jsonschema.Schema {
if s.Type == jsonschema.NumberType || s.Type == jsonschema.BooleanType {
s = jsonschema.Schema{
AnyOf: []jsonschema.Schema{
s,
{
Type: jsonschema.StringType,
// TODO: Narrow down the scope of the regex match.
// Also likely need to rename this variable.
Pattern: dynvar.VariableRegex,
},
},
}
}
return s
})
if err != nil {
return nil, err
}
return &s, nil
// tracker := newTracker()
// schema, err := safeToSchema(golangType, docs, "", tracker)
// if err != nil {
// return nil, tracker.errWithTrace(err.Error(), "root")
// }
// return schema, nil
}
// func jsonSchemaType(golangType reflect.Type) (jsonschema.Type, error) {
// switch golangType.Kind() {
// case reflect.Bool:
// return jsonschema.BooleanType, nil
// case reflect.String:
// return jsonschema.StringType, nil
// case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
// reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
// reflect.Float32, reflect.Float64:
// return jsonschema.NumberType, nil
// case reflect.Struct:
// return jsonschema.ObjectType, nil
// case reflect.Map:
// if golangType.Key().Kind() != reflect.String {
// return jsonschema.InvalidType, fmt.Errorf("only strings map keys are valid. key type: %v", golangType.Key().Kind())
// }
// return jsonschema.ObjectType, nil
// case reflect.Array, reflect.Slice:
// return jsonschema.ArrayType, nil
// default:
// return jsonschema.InvalidType, fmt.Errorf("unhandled golang type: %s", golangType)
// }
// }
// A wrapper over toSchema function to:
// 1. Detect cycles in the bundle config struct.
// 2. Update tracker
//
// params:
//
// - golangType: Golang type to generate json schema for
//
// - docs: Contains documentation to be injected into the generated json schema
//
// - traceId: An identifier for the current type, to trace recursive traversal.
// Its value is the first json tag in case of struct fields and "" in other cases
// like array, map or no json tags
//
// - tracker: Keeps track of types / traceIds seen during recursive traversal
// func safeToSchema(golangType reflect.Type, docs *Docs, traceId string, tracker *tracker) (*jsonschema.Schema, error) {
// // HACK to unblock CLI release (13th Feb 2024). This is temporary until proper
// // support for recursive types is added to the schema generator. PR: https://github.com/databricks/cli/pull/1204
// if traceId == "for_each_task" {
// return &jsonschema.Schema{
// Type: jsonschema.ObjectType,
// }, nil
// }
// // WE ERROR OUT IF THERE ARE CYCLES IN THE JSON SCHEMA
// // There are mechanisms to deal with cycles though recursive identifiers in json
// // schema. However if we use them, we would need to make sure we are able to detect
// // cycles where two properties (directly or indirectly) pointing to each other
// //
// // see: https://json-schema.org/understanding-json-schema/structuring.html#recursion
// // for details
// if tracker.hasCycle(golangType) {
// return nil, fmt.Errorf("cycle detected")
// }
// tracker.push(golangType, traceId)
// props, err := toSchema(golangType, docs, tracker)
// if err != nil {
// return nil, err
// }
// tracker.pop(golangType)
// return props, nil
// }
// This function returns all member fields of the provided type.
// If the type has embedded (aka anonymous) fields, this function traverses
// those in a breadth first manner
// func getStructFields(golangType reflect.Type) []reflect.StructField {
// fields := []reflect.StructField{}
// bfsQueue := list.New()
// for i := 0; i < golangType.NumField(); i++ {
// bfsQueue.PushBack(golangType.Field(i))
// }
// for bfsQueue.Len() > 0 {
// front := bfsQueue.Front()
// field := front.Value.(reflect.StructField)
// bfsQueue.Remove(front)
// if !field.Anonymous {
// fields = append(fields, field)
// continue
// }
// fieldType := field.Type
// if fieldType.Kind() == reflect.Pointer {
// fieldType = fieldType.Elem()
// }
// for i := 0; i < fieldType.NumField(); i++ {
// bfsQueue.PushBack(fieldType.Field(i))
// }
// }
// return fields
// }
// func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*jsonschema.Schema, error) {
// // *Struct and Struct generate identical json schemas
// if golangType.Kind() == reflect.Pointer {
// return safeToSchema(golangType.Elem(), docs, "", tracker)
// }
// if golangType.Kind() == reflect.Interface {
// return &jsonschema.Schema{}, nil
// }
// rootJavascriptType, err := jsonSchemaType(golangType)
// if err != nil {
// return nil, err
// }
// jsonSchema := &jsonschema.Schema{Type: rootJavascriptType}
// // If the type is a non-string primitive, then we allow it to be a string
// // provided it's a pure variable reference (ie only a single variable reference).
// if rootJavascriptType == jsonschema.BooleanType || rootJavascriptType == jsonschema.NumberType {
// jsonSchema = &jsonschema.Schema{
// AnyOf: []*jsonschema.Schema{
// {
// Type: rootJavascriptType,
// },
// {
// Type: jsonschema.StringType,
// Pattern: dynvar.VariableRegex,
// },
// },
// }
// }
// if docs != nil {
// jsonSchema.Description = docs.Description
// }
// // case array/slice
// if golangType.Kind() == reflect.Array || golangType.Kind() == reflect.Slice {
// elemGolangType := golangType.Elem()
// elemJavascriptType, err := jsonSchemaType(elemGolangType)
// if err != nil {
// return nil, err
// }
// var childDocs *Docs
// if docs != nil {
// childDocs = docs.Items
// }
// elemProps, err := safeToSchema(elemGolangType, childDocs, "", tracker)
// if err != nil {
// return nil, err
// }
// jsonSchema.Items = &jsonschema.Schema{
// Type: elemJavascriptType,
// Properties: elemProps.Properties,
// AdditionalProperties: elemProps.AdditionalProperties,
// Items: elemProps.Items,
// Required: elemProps.Required,
// }
// }
// // case map
// if golangType.Kind() == reflect.Map {
// if golangType.Key().Kind() != reflect.String {
// return nil, fmt.Errorf("only string keyed maps allowed")
// }
// var childDocs *Docs
// if docs != nil {
// childDocs = docs.AdditionalProperties
// }
// jsonSchema.AdditionalProperties, err = safeToSchema(golangType.Elem(), childDocs, "", tracker)
// if err != nil {
// return nil, err
// }
// }
// // case struct
// if golangType.Kind() == reflect.Struct {
// children := getStructFields(golangType)
// properties := map[string]*jsonschema.Schema{}
// required := []string{}
// for _, child := range children {
// bundleTag := child.Tag.Get("bundle")
// // Fields marked as "readonly", "internal" or "deprecated" are skipped
// // while generating the schema
// if bundleTag == readonlyTag || bundleTag == internalTag || bundleTag == deprecatedTag {
// continue
// }
// // get child json tags
// childJsonTag := strings.Split(child.Tag.Get("json"), ",")
// childName := childJsonTag[0]
// // skip children that have no json tags, the first json tag is ""
// // or the first json tag is "-"
// if childName == "" || childName == "-" {
// continue
// }
// // get docs for the child if they exist
// var childDocs *Docs
// if docs != nil {
// if val, ok := docs.Properties[childName]; ok {
// childDocs = val
// }
// }
// // compute if the child is a required field. Determined by the
// // presence of "omitempty" in the json tags
// hasOmitEmptyTag := false
// for i := 1; i < len(childJsonTag); i++ {
// if childJsonTag[i] == "omitempty" {
// hasOmitEmptyTag = true
// }
// }
// if !hasOmitEmptyTag {
// required = append(required, childName)
// }
// // compute Schema.Properties for the child recursively
// fieldProps, err := safeToSchema(child.Type, childDocs, childName, tracker)
// if err != nil {
// return nil, err
// }
// properties[childName] = fieldProps
// }
// jsonSchema.AdditionalProperties = false
// jsonSchema.Properties = properties
// jsonSchema.Required = required
// }
// return jsonSchema, nil
// }


@@ -0,0 +1,341 @@
package schema
import (
"encoding/json"
"reflect"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TODO: Add a test that checks the primitive overrides for reference regexs work.
// Basically that the custom override for bundle regex works.
// TODO: Add a bundle of end to end tests, that both fail and pass the schema validation.
func TestDocIngestionForObject(t *testing.T) {
docs := &Docs{
Description: "docs for root",
Properties: map[string]*Docs{
"my_struct": {
Description: "docs for my struct",
Properties: map[string]*Docs{
"a": {
Description: "docs for a",
},
"c": {
Description: "docs for c which does not exist on my_struct",
},
},
},
},
}
type MyStruct struct {
A string `json:"a"`
B int `json:"b"`
}
type Root struct {
MyStruct *MyStruct `json:"my_struct"`
}
elem := Root{}
schema, err := New(reflect.TypeOf(elem), docs)
require.NoError(t, err)
jsonSchema, err := json.MarshalIndent(schema, " ", " ")
assert.NoError(t, err)
expectedSchema :=
`{
"type": "object",
"description": "docs for root",
"properties": {
"my_struct": {
"type": "object",
"description": "docs for my struct",
"properties": {
"a": {
"type": "string",
"description": "docs for a"
},
"b": {
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
},
"additionalProperties": false,
"required": [
"a",
"b"
]
}
},
"additionalProperties": false,
"required": [
"my_struct"
]
}`
t.Log("[DEBUG] actual: ", string(jsonSchema))
t.Log("[DEBUG] expected: ", expectedSchema)
assert.Equal(t, expectedSchema, string(jsonSchema))
}
func TestDocIngestionForSlice(t *testing.T) {
docs := &Docs{
Description: "docs for root",
Properties: map[string]*Docs{
"my_slice": {
Description: "docs for my slice",
Items: &Docs{
Properties: map[string]*Docs{
"guava": {
Description: "docs for guava",
},
"pineapple": {
Description: "docs for pineapple",
},
"watermelon": {
Description: "docs for watermelon which does not exist in schema",
},
},
},
},
},
}
type Bar struct {
Guava int `json:"guava"`
Pineapple int `json:"pineapple"`
}
type Root struct {
MySlice []Bar `json:"my_slice"`
}
elem := Root{}
schema, err := New(reflect.TypeOf(elem), docs)
require.NoError(t, err)
jsonSchema, err := json.MarshalIndent(schema, " ", " ")
assert.NoError(t, err)
expectedSchema :=
`{
"type": "object",
"description": "docs for root",
"properties": {
"my_slice": {
"type": "array",
"description": "docs for my slice",
"items": {
"type": "object",
"properties": {
"guava": {
"description": "docs for guava",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
"pineapple": {
"description": "docs for pineapple",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
},
"additionalProperties": false,
"required": [
"guava",
"pineapple"
]
}
}
},
"additionalProperties": false,
"required": [
"my_slice"
]
}`
t.Log("[DEBUG] actual: ", string(jsonSchema))
t.Log("[DEBUG] expected: ", expectedSchema)
assert.Equal(t, expectedSchema, string(jsonSchema))
}
func TestDocIngestionForMap(t *testing.T) {
docs := &Docs{
Description: "docs for root",
Properties: map[string]*Docs{
"my_map": {
Description: "docs for my map",
AdditionalProperties: &Docs{
Properties: map[string]*Docs{
"apple": {
Description: "docs for apple",
},
"mango": {
Description: "docs for mango",
},
"watermelon": {
Description: "docs for watermelon which does not exist in schema",
},
"papaya": {
Description: "docs for papaya which does not exist in schema",
},
},
},
},
},
}
type Foo struct {
Apple int `json:"apple"`
Mango int `json:"mango"`
}
type Root struct {
MyMap map[string]*Foo `json:"my_map"`
}
elem := Root{}
schema, err := New(reflect.TypeOf(elem), docs)
require.NoError(t, err)
jsonSchema, err := json.MarshalIndent(schema, " ", " ")
assert.NoError(t, err)
expectedSchema :=
`{
"type": "object",
"description": "docs for root",
"properties": {
"my_map": {
"type": "object",
"description": "docs for my map",
"additionalProperties": {
"type": "object",
"properties": {
"apple": {
"description": "docs for apple",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
},
"mango": {
"description": "docs for mango",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
},
"additionalProperties": false,
"required": [
"apple",
"mango"
]
}
}
},
"additionalProperties": false,
"required": [
"my_map"
]
}`
t.Log("[DEBUG] actual: ", string(jsonSchema))
t.Log("[DEBUG] expected: ", expectedSchema)
assert.Equal(t, expectedSchema, string(jsonSchema))
}
func TestDocIngestionForTopLevelPrimitive(t *testing.T) {
docs := &Docs{
Description: "docs for root",
Properties: map[string]*Docs{
"my_val": {
Description: "docs for my val",
},
},
}
type Root struct {
MyVal int `json:"my_val"`
}
elem := Root{}
schema, err := New(reflect.TypeOf(elem), docs)
require.NoError(t, err)
jsonSchema, err := json.MarshalIndent(schema, " ", " ")
assert.NoError(t, err)
expectedSchema :=
`{
"type": "object",
"description": "docs for root",
"properties": {
"my_val": {
"description": "docs for my val",
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
}
]
}
},
"additionalProperties": false,
"required": [
"my_val"
]
}`
t.Log("[DEBUG] actual: ", string(jsonSchema))
t.Log("[DEBUG] expected: ", expectedSchema)
assert.Equal(t, expectedSchema, string(jsonSchema))
}

bundle/schema/spec.go (new file, 11 lines)

@@ -0,0 +1,11 @@
package schema
import "github.com/databricks/cli/libs/jsonschema"
type Specification struct {
Components *Components `json:"components"`
}
type Components struct {
Schemas map[string]*jsonschema.Schema `json:"schemas,omitempty"`
}

bundle/schema/tracker.go (new file, 53 lines)

@@ -0,0 +1,53 @@
package schema
import (
"container/list"
"fmt"
)
type tracker struct {
// Nodes encountered in current path during the recursive traversal. Used to
// check for cycles
seenNodes map[interface{}]struct{}
// List of node names encountered in order in current path during the recursive traversal.
// Used to hydrate errors with path to the exact node where error occured.
//
// NOTE: node and node names can be the same
listOfNodes *list.List
}
func newTracker() *tracker {
return &tracker{
seenNodes: map[interface{}]struct{}{},
listOfNodes: list.New(),
}
}
func (t *tracker) errWithTrace(prefix string, initTrace string) error {
traceString := initTrace
curr := t.listOfNodes.Front()
for curr != nil {
if curr.Value.(string) != "" {
traceString += " -> " + curr.Value.(string)
}
curr = curr.Next()
}
return fmt.Errorf(prefix + ". traversal trace: " + traceString)
}
func (t *tracker) hasCycle(node interface{}) bool {
_, ok := t.seenNodes[node]
return ok
}
func (t *tracker) push(node interface{}, name string) {
t.seenNodes[node] = struct{}{}
t.listOfNodes.PushBack(name)
}
func (t *tracker) pop(nodeType interface{}) {
back := t.listOfNodes.Back()
t.listOfNodes.Remove(back)
delete(t.seenNodes, nodeType)
}
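A short usage sketch for the tracker above, assuming a hypothetical walkNode helper in the same package: push each node before recursing, pop on the way back out, and report a cycle together with the accumulated trace.

// Hypothetical helper illustrating the intended push/pop discipline; the
// traversal body itself is elided.
func walkNode(t *tracker, node interface{}, name string) error {
	if t.hasCycle(node) {
		return t.errWithTrace("cycle detected", name)
	}
	t.push(node, name)
	defer t.pop(node)

	// ... recurse into the children of node, calling walkNode on each ...
	return nil
}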

6
cmd/workspace/cmd.go generated

@ -44,8 +44,6 @@ import (
permission_migration "github.com/databricks/cli/cmd/workspace/permission-migration" permission_migration "github.com/databricks/cli/cmd/workspace/permission-migration"
permissions "github.com/databricks/cli/cmd/workspace/permissions" permissions "github.com/databricks/cli/cmd/workspace/permissions"
pipelines "github.com/databricks/cli/cmd/workspace/pipelines" pipelines "github.com/databricks/cli/cmd/workspace/pipelines"
policy_compliance_for_clusters "github.com/databricks/cli/cmd/workspace/policy-compliance-for-clusters"
policy_compliance_for_jobs "github.com/databricks/cli/cmd/workspace/policy-compliance-for-jobs"
policy_families "github.com/databricks/cli/cmd/workspace/policy-families" policy_families "github.com/databricks/cli/cmd/workspace/policy-families"
provider_exchange_filters "github.com/databricks/cli/cmd/workspace/provider-exchange-filters" provider_exchange_filters "github.com/databricks/cli/cmd/workspace/provider-exchange-filters"
provider_exchanges "github.com/databricks/cli/cmd/workspace/provider-exchanges" provider_exchanges "github.com/databricks/cli/cmd/workspace/provider-exchanges"
@ -65,7 +63,6 @@ import (
recipients "github.com/databricks/cli/cmd/workspace/recipients" recipients "github.com/databricks/cli/cmd/workspace/recipients"
registered_models "github.com/databricks/cli/cmd/workspace/registered-models" registered_models "github.com/databricks/cli/cmd/workspace/registered-models"
repos "github.com/databricks/cli/cmd/workspace/repos" repos "github.com/databricks/cli/cmd/workspace/repos"
resource_quotas "github.com/databricks/cli/cmd/workspace/resource-quotas"
schemas "github.com/databricks/cli/cmd/workspace/schemas" schemas "github.com/databricks/cli/cmd/workspace/schemas"
secrets "github.com/databricks/cli/cmd/workspace/secrets" secrets "github.com/databricks/cli/cmd/workspace/secrets"
service_principals "github.com/databricks/cli/cmd/workspace/service-principals" service_principals "github.com/databricks/cli/cmd/workspace/service-principals"
@ -133,8 +130,6 @@ func All() []*cobra.Command {
out = append(out, permission_migration.New()) out = append(out, permission_migration.New())
out = append(out, permissions.New()) out = append(out, permissions.New())
out = append(out, pipelines.New()) out = append(out, pipelines.New())
out = append(out, policy_compliance_for_clusters.New())
out = append(out, policy_compliance_for_jobs.New())
out = append(out, policy_families.New()) out = append(out, policy_families.New())
out = append(out, provider_exchange_filters.New()) out = append(out, provider_exchange_filters.New())
out = append(out, provider_exchanges.New()) out = append(out, provider_exchanges.New())
@ -154,7 +149,6 @@ func All() []*cobra.Command {
out = append(out, recipients.New()) out = append(out, recipients.New())
out = append(out, registered_models.New()) out = append(out, registered_models.New())
out = append(out, repos.New()) out = append(out, repos.New())
out = append(out, resource_quotas.New())
out = append(out, schemas.New()) out = append(out, schemas.New())
out = append(out, secrets.New()) out = append(out, secrets.New())
out = append(out, service_principals.New()) out = append(out, service_principals.New())


@ -75,7 +75,6 @@ func newCreate() *cobra.Command {
cmd.Flags().StringVar(&createReq.AccessPoint, "access-point", createReq.AccessPoint, `The AWS access point to use when accessing S3 for this external location.`) cmd.Flags().StringVar(&createReq.AccessPoint, "access-point", createReq.AccessPoint, `The AWS access point to use when accessing S3 for this external location.`)
cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`)
// TODO: complex arg: encryption_details // TODO: complex arg: encryption_details
cmd.Flags().BoolVar(&createReq.Fallback, "fallback", createReq.Fallback, `Indicates whether fallback mode is enabled for this external location.`)
cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Indicates whether the external location is read-only.`) cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Indicates whether the external location is read-only.`)
cmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Skips validation of the storage credential associated with the external location.`) cmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Skips validation of the storage credential associated with the external location.`)
@ -348,7 +347,6 @@ func newUpdate() *cobra.Command {
cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`)
cmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`) cmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`)
// TODO: complex arg: encryption_details // TODO: complex arg: encryption_details
cmd.Flags().BoolVar(&updateReq.Fallback, "fallback", updateReq.Fallback, `Indicates whether fallback mode is enabled for this external location.`)
cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`) cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`)
cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces. Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`) cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces. Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`)
cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the external location.`) cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the external location.`)


@ -1,260 +0,0 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package policy_compliance_for_clusters
import (
"fmt"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "policy-compliance-for-clusters",
Short: `The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.`,
Long: `The policy compliance APIs allow you to view and manage the policy compliance
status of clusters in your workspace.
A cluster is compliant with its policy if its configuration satisfies all its
policy rules. Clusters could be out of compliance if their policy was updated
after the cluster was last edited.
The get and list compliance APIs allow you to view the policy compliance
status of a cluster. The enforce compliance API allows you to update a cluster
to be compliant with the current version of its policy.`,
GroupID: "compute",
Annotations: map[string]string{
"package": "compute",
},
}
// Add methods
cmd.AddCommand(newEnforceCompliance())
cmd.AddCommand(newGetCompliance())
cmd.AddCommand(newListCompliance())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start enforce-compliance command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var enforceComplianceOverrides []func(
*cobra.Command,
*compute.EnforceClusterComplianceRequest,
)
func newEnforceCompliance() *cobra.Command {
cmd := &cobra.Command{}
var enforceComplianceReq compute.EnforceClusterComplianceRequest
var enforceComplianceJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&enforceComplianceJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().BoolVar(&enforceComplianceReq.ValidateOnly, "validate-only", enforceComplianceReq.ValidateOnly, `If set, previews the changes that would be made to a cluster to enforce compliance but does not update the cluster.`)
cmd.Use = "enforce-compliance CLUSTER_ID"
cmd.Short = `Enforce cluster policy compliance.`
cmd.Long = `Enforce cluster policy compliance.
Updates a cluster to be compliant with the current version of its policy. A
cluster can be updated if it is in a RUNNING or TERMINATED state.
If a cluster is updated while in a RUNNING state, it will be restarted so
that the new attributes can take effect.
If a cluster is updated while in a TERMINATED state, it will remain
TERMINATED. The next time the cluster is started, the new attributes will
take effect.
Clusters created by the Databricks Jobs, DLT, or Models services cannot be
enforced by this API. Instead, use the "Enforce job policy compliance" API to
enforce policy compliance on jobs.
Arguments:
CLUSTER_ID: The ID of the cluster you want to enforce policy compliance on.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(0)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'cluster_id' in your JSON input")
}
return nil
}
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = enforceComplianceJson.Unmarshal(&enforceComplianceReq)
if err != nil {
return err
}
}
if !cmd.Flags().Changed("json") {
enforceComplianceReq.ClusterId = args[0]
}
response, err := w.PolicyComplianceForClusters.EnforceCompliance(ctx, enforceComplianceReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range enforceComplianceOverrides {
fn(cmd, &enforceComplianceReq)
}
return cmd
}
// start get-compliance command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getComplianceOverrides []func(
*cobra.Command,
*compute.GetClusterComplianceRequest,
)
func newGetCompliance() *cobra.Command {
cmd := &cobra.Command{}
var getComplianceReq compute.GetClusterComplianceRequest
// TODO: short flags
cmd.Use = "get-compliance CLUSTER_ID"
cmd.Short = `Get cluster policy compliance.`
cmd.Long = `Get cluster policy compliance.
Returns the policy compliance status of a cluster. Clusters could be out of
compliance if their policy was updated after the cluster was last edited.
Arguments:
CLUSTER_ID: The ID of the cluster to get the compliance status`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
getComplianceReq.ClusterId = args[0]
response, err := w.PolicyComplianceForClusters.GetCompliance(ctx, getComplianceReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getComplianceOverrides {
fn(cmd, &getComplianceReq)
}
return cmd
}
// start list-compliance command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listComplianceOverrides []func(
*cobra.Command,
*compute.ListClusterCompliancesRequest,
)
func newListCompliance() *cobra.Command {
cmd := &cobra.Command{}
var listComplianceReq compute.ListClusterCompliancesRequest
// TODO: short flags
cmd.Flags().IntVar(&listComplianceReq.PageSize, "page-size", listComplianceReq.PageSize, `Use this field to specify the maximum number of results to be returned by the server.`)
cmd.Flags().StringVar(&listComplianceReq.PageToken, "page-token", listComplianceReq.PageToken, `A page token that can be used to navigate to the next page or previous page as returned by next_page_token or prev_page_token.`)
cmd.Use = "list-compliance POLICY_ID"
cmd.Short = `List cluster policy compliance.`
cmd.Long = `List cluster policy compliance.
Returns the policy compliance status of all clusters that use a given policy.
Clusters could be out of compliance if their policy was updated after the
cluster was last edited.
Arguments:
POLICY_ID: Canonical unique identifier for the cluster policy.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
listComplianceReq.PolicyId = args[0]
response := w.PolicyComplianceForClusters.ListCompliance(ctx, listComplianceReq)
return cmdio.RenderIterator(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range listComplianceOverrides {
fn(cmd, &listComplianceReq)
}
return cmd
}
// end service PolicyComplianceForClusters
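The generated command above is a thin wrapper over w.PolicyComplianceForClusters in the Databricks Go SDK. A hedged sketch of the same call made directly, assuming workspace credentials are picked up from the environment; the cluster ID is a placeholder.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/compute"
)

func main() {
	ctx := context.Background()
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		log.Fatal(err)
	}

	// Preview the changes enforcement would make, without restarting the
	// cluster (same behavior as the --validate-only flag above).
	resp, err := w.PolicyComplianceForClusters.EnforceCompliance(ctx, compute.EnforceClusterComplianceRequest{
		ClusterId:    "1234-567890-abcde123", // placeholder cluster ID
		ValidateOnly: true,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", resp)
}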


@ -1,262 +0,0 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package policy_compliance_for_jobs
import (
"fmt"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "policy-compliance-for-jobs",
Short: `The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.`,
Long: `The compliance APIs allow you to view and manage the policy compliance status
of jobs in your workspace. This API currently only supports compliance
controls for cluster policies.
A job is in compliance if its cluster configurations satisfy the rules of all
their respective cluster policies. A job could be out of compliance if a
cluster policy it uses was updated after the job was last edited. The job is
considered out of compliance if any of its clusters no longer comply with
their updated policies.
The get and list compliance APIs allow you to view the policy compliance
status of a job. The enforce compliance API allows you to update a job so that
it becomes compliant with all of its policies.`,
GroupID: "jobs",
Annotations: map[string]string{
"package": "jobs",
},
}
// Add methods
cmd.AddCommand(newEnforceCompliance())
cmd.AddCommand(newGetCompliance())
cmd.AddCommand(newListCompliance())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start enforce-compliance command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var enforceComplianceOverrides []func(
*cobra.Command,
*jobs.EnforcePolicyComplianceRequest,
)
func newEnforceCompliance() *cobra.Command {
cmd := &cobra.Command{}
var enforceComplianceReq jobs.EnforcePolicyComplianceRequest
var enforceComplianceJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&enforceComplianceJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().BoolVar(&enforceComplianceReq.ValidateOnly, "validate-only", enforceComplianceReq.ValidateOnly, `If set, previews changes made to the job to comply with its policy, but does not update the job.`)
cmd.Use = "enforce-compliance JOB_ID"
cmd.Short = `Enforce job policy compliance.`
cmd.Long = `Enforce job policy compliance.
Updates a job so the job clusters that are created when running the job
(specified in new_cluster) are compliant with the current versions of their
respective cluster policies. All-purpose clusters used in the job will not be
updated.
Arguments:
JOB_ID: The ID of the job you want to enforce policy compliance on.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(0)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'job_id' in your JSON input")
}
return nil
}
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
err = enforceComplianceJson.Unmarshal(&enforceComplianceReq)
if err != nil {
return err
}
}
if !cmd.Flags().Changed("json") {
_, err = fmt.Sscan(args[0], &enforceComplianceReq.JobId)
if err != nil {
return fmt.Errorf("invalid JOB_ID: %s", args[0])
}
}
response, err := w.PolicyComplianceForJobs.EnforceCompliance(ctx, enforceComplianceReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range enforceComplianceOverrides {
fn(cmd, &enforceComplianceReq)
}
return cmd
}
// start get-compliance command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getComplianceOverrides []func(
*cobra.Command,
*jobs.GetPolicyComplianceRequest,
)
func newGetCompliance() *cobra.Command {
cmd := &cobra.Command{}
var getComplianceReq jobs.GetPolicyComplianceRequest
// TODO: short flags
cmd.Use = "get-compliance JOB_ID"
cmd.Short = `Get job policy compliance.`
cmd.Long = `Get job policy compliance.
Returns the policy compliance status of a job. Jobs could be out of compliance
if a cluster policy they use was updated after the job was last edited and
some of its job clusters no longer comply with their updated policies.
Arguments:
JOB_ID: The ID of the job whose compliance status you are requesting.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
_, err = fmt.Sscan(args[0], &getComplianceReq.JobId)
if err != nil {
return fmt.Errorf("invalid JOB_ID: %s", args[0])
}
response, err := w.PolicyComplianceForJobs.GetCompliance(ctx, getComplianceReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getComplianceOverrides {
fn(cmd, &getComplianceReq)
}
return cmd
}
// start list-compliance command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listComplianceOverrides []func(
*cobra.Command,
*jobs.ListJobComplianceRequest,
)
func newListCompliance() *cobra.Command {
cmd := &cobra.Command{}
var listComplianceReq jobs.ListJobComplianceRequest
// TODO: short flags
cmd.Flags().IntVar(&listComplianceReq.PageSize, "page-size", listComplianceReq.PageSize, `Use this field to specify the maximum number of results to be returned by the server.`)
cmd.Flags().StringVar(&listComplianceReq.PageToken, "page-token", listComplianceReq.PageToken, `A page token that can be used to navigate to the next page or previous page as returned by next_page_token or prev_page_token.`)
cmd.Use = "list-compliance POLICY_ID"
cmd.Short = `List job policy compliance.`
cmd.Long = `List job policy compliance.
Returns the policy compliance status of all jobs that use a given policy. Jobs
could be out of compliance if a cluster policy they use was updated after the
job was last edited and its job clusters no longer comply with the updated
policy.
Arguments:
POLICY_ID: Canonical unique identifier for the cluster policy.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(1)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
listComplianceReq.PolicyId = args[0]
response := w.PolicyComplianceForJobs.ListCompliance(ctx, listComplianceReq)
return cmdio.RenderIterator(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range listComplianceOverrides {
fn(cmd, &listComplianceReq)
}
return cmd
}
// end service PolicyComplianceForJobs
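The jobs variant follows the same shape. A sketch assuming an already-configured workspace client; the numeric job ID parameter mirrors the Sscan parsing in the generated command and is a placeholder.

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/jobs"
)

// checkJobCompliance mirrors `policy-compliance-for-jobs get-compliance JOB_ID`.
// The caller supplies a configured workspace client and a numeric job ID.
func checkJobCompliance(ctx context.Context, w *databricks.WorkspaceClient, jobID int64) error {
	resp, err := w.PolicyComplianceForJobs.GetCompliance(ctx, jobs.GetPolicyComplianceRequest{
		JobId: jobID,
	})
	if err != nil {
		return err
	}
	fmt.Printf("job %d: %+v\n", jobID, resp)
	return nil
}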


@ -16,9 +16,9 @@ var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command { func New() *cobra.Command {
cmd := &cobra.Command{ cmd := &cobra.Command{
Use: "query-history", Use: "query-history",
Short: `A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.`, Short: `A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless compute, and DLT.`,
Long: `A service responsible for storing and retrieving the list of queries run Long: `A service responsible for storing and retrieving the list of queries run
against SQL endpoints and serverless compute.`, against SQL endpoints, serverless compute, and DLT.`,
GroupID: "sql", GroupID: "sql",
Annotations: map[string]string{ Annotations: map[string]string{
"package": "sql", "package": "sql",
@ -53,7 +53,6 @@ func newList() *cobra.Command {
// TODO: short flags // TODO: short flags
// TODO: complex arg: filter_by // TODO: complex arg: filter_by
cmd.Flags().BoolVar(&listReq.IncludeMetrics, "include-metrics", listReq.IncludeMetrics, `Whether to include the query metrics with each query.`)
cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Limit the number of results returned in one page.`) cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Limit the number of results returned in one page.`)
cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`) cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`)
@ -61,7 +60,8 @@ func newList() *cobra.Command {
cmd.Short = `List Queries.` cmd.Short = `List Queries.`
cmd.Long = `List Queries. cmd.Long = `List Queries.
List the history of queries through SQL warehouses, and serverless compute. List the history of queries through SQL warehouses, serverless compute, and
DLT.
You can filter by user ID, warehouse ID, status, and time range. Most recently You can filter by user ID, warehouse ID, status, and time range. Most recently
started queries are returned first (up to max_results in request). The started queries are returned first (up to max_results in request). The


@ -1,168 +0,0 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package resource_quotas
import (
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "resource-quotas",
Short: `Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.`,
Long: `Unity Catalog enforces resource quotas on all securable objects, which limits
the number of resources that can be created. Quotas are expressed in terms of
a resource type and a parent (for example, tables per metastore or schemas per
catalog). The resource quota APIs enable you to monitor your current usage and
limits. For more information on resource quotas see the [Unity Catalog
documentation].
[Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas`,
GroupID: "catalog",
Annotations: map[string]string{
"package": "catalog",
},
}
// Add methods
cmd.AddCommand(newGetQuota())
cmd.AddCommand(newListQuotas())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start get-quota command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getQuotaOverrides []func(
*cobra.Command,
*catalog.GetQuotaRequest,
)
func newGetQuota() *cobra.Command {
cmd := &cobra.Command{}
var getQuotaReq catalog.GetQuotaRequest
// TODO: short flags
cmd.Use = "get-quota PARENT_SECURABLE_TYPE PARENT_FULL_NAME QUOTA_NAME"
cmd.Short = `Get information for a single resource quota.`
cmd.Long = `Get information for a single resource quota.
The GetQuota API returns usage information for a single resource quota,
defined as a child-parent pair. This API also refreshes the quota count if it
is out of date. Refreshes are triggered asynchronously. The updated count
might not be returned in the first call.
Arguments:
PARENT_SECURABLE_TYPE: Securable type of the quota parent.
PARENT_FULL_NAME: Full name of the parent resource. Provide the metastore ID if the parent
is a metastore.
QUOTA_NAME: Name of the quota. Follows the pattern of the quota type, with "-quota"
added as a suffix.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(3)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
getQuotaReq.ParentSecurableType = args[0]
getQuotaReq.ParentFullName = args[1]
getQuotaReq.QuotaName = args[2]
response, err := w.ResourceQuotas.GetQuota(ctx, getQuotaReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getQuotaOverrides {
fn(cmd, &getQuotaReq)
}
return cmd
}
// start list-quotas command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listQuotasOverrides []func(
*cobra.Command,
*catalog.ListQuotasRequest,
)
func newListQuotas() *cobra.Command {
cmd := &cobra.Command{}
var listQuotasReq catalog.ListQuotasRequest
// TODO: short flags
cmd.Flags().IntVar(&listQuotasReq.MaxResults, "max-results", listQuotasReq.MaxResults, `The number of quotas to return.`)
cmd.Flags().StringVar(&listQuotasReq.PageToken, "page-token", listQuotasReq.PageToken, `Opaque token for the next page of results.`)
cmd.Use = "list-quotas"
cmd.Short = `List all resource quotas under a metastore.`
cmd.Long = `List all resource quotas under a metastore.
ListQuotas returns all quota values under the metastore. There are no SLAs on
the freshness of the counts returned. This API does not trigger a refresh of
quota counts.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response := w.ResourceQuotas.ListQuotas(ctx, listQuotasReq)
return cmdio.RenderIterator(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range listQuotasOverrides {
fn(cmd, &listQuotasReq)
}
return cmd
}
// end service ResourceQuotas
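A sketch of the listing the list-quotas command renders, using the SDK paging iterator directly; it assumes the SDK version on the left-hand side of this diff (v0.45.0), where w.ResourceQuotas is available.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/catalog"
)

func main() {
	ctx := context.Background()
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		log.Fatal(err)
	}

	// Page through every quota under the metastore; this is the iterator the
	// list-quotas command hands to cmdio.RenderIterator above.
	it := w.ResourceQuotas.ListQuotas(ctx, catalog.ListQuotasRequest{})
	for it.HasNext(ctx) {
		quota, err := it.Next(ctx)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%+v\n", quota)
	}
}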

2
go.mod

@ -5,7 +5,7 @@ go 1.22
require ( require (
github.com/Masterminds/semver/v3 v3.2.1 // MIT github.com/Masterminds/semver/v3 v3.2.1 // MIT
github.com/briandowns/spinner v1.23.1 // Apache 2.0 github.com/briandowns/spinner v1.23.1 // Apache 2.0
github.com/databricks/databricks-sdk-go v0.45.0 // Apache 2.0 github.com/databricks/databricks-sdk-go v0.44.0 // Apache 2.0
github.com/fatih/color v1.17.0 // MIT github.com/fatih/color v1.17.0 // MIT
github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/google/uuid v1.6.0 // BSD-3-Clause

4
go.sum generated

@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
github.com/databricks/databricks-sdk-go v0.45.0 h1:wdx5Wm/ESrahdHeq62WrjLeGjV4r722LLanD8ahI0Mo= github.com/databricks/databricks-sdk-go v0.44.0 h1:9/FZACv4EFQIOYxfwYVKnY7v46xio9FKCw9tpKB2O/s=
github.com/databricks/databricks-sdk-go v0.45.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU= github.com/databricks/databricks-sdk-go v0.44.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=


@ -6,9 +6,9 @@ import (
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
) )
const ReferenceRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}` const VariableRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`
var re = regexp.MustCompile(ReferenceRegex) var re = regexp.MustCompile(VariableRegex)
// ref represents a variable reference. // ref represents a variable reference.
// It is a string [dyn.Value] contained in a larger [dyn.Value]. // It is a string [dyn.Value] contained in a larger [dyn.Value].
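A standalone sketch of what this reference pattern accepts; the candidate strings below are illustrative only.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as ReferenceRegex / VariableRegex above: only well-formed
	// ${...} references match, including nested fields and indexed elements.
	re := regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`)

	for _, s := range []string{
		"${var.catalog}",
		"${resources.jobs.my_job.tasks[0].task_key}",
		"plain string",
	} {
		fmt.Printf("%q -> %v\n", s, re.MatchString(s))
	}
}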


@ -3,13 +3,17 @@ package jsonschema
import ( import (
"container/list" "container/list"
"fmt" "fmt"
"maps"
"path" "path"
"reflect" "reflect"
"slices" "slices"
"strings" "strings"
) )
// TODO: Maybe can be removed?
var InvalidSchema = Schema{
Type: InvalidType,
}
// Fields tagged "readonly" should not be emitted in the schema as they are // Fields tagged "readonly" should not be emitted in the schema as they are
// computed at runtime, and should not be assigned a value by the bundle author. // computed at runtime, and should not be assigned a value by the bundle author.
const readonlyTag = "readonly" const readonlyTag = "readonly"
@ -22,17 +26,19 @@ const internalTag = "internal"
// Fields tagged as "deprecated" are removed/omitted from the generated schema. // Fields tagged as "deprecated" are removed/omitted from the generated schema.
const deprecatedTag = "deprecated" const deprecatedTag = "deprecated"
// TODO: Test what happens with invalid cycles? Do integration tests fail?
// TODO: Call out in the PR description that recursive types like "for_each_task"
// are now supported.
type constructor struct { type constructor struct {
// Map of typ.PkgPath() + "." + typ.Name() to the schema for that type. // Map of typ.PkgPath() + "." + typ.Name() to the schema for that type.
// Example key: github.com/databricks/databricks-sdk-go/service/jobs.JobSettings // Example key: github.com/databricks/databricks-sdk-go/service/jobs.JobSettings
definitions map[string]Schema definitions map[string]Schema
// Map of typ.PkgPath() + "." + typ.Name() to the corresponding type. Used to seen map[string]struct{}
// track types that have been seen to avoid infinite recursion.
seen map[string]reflect.Type
// The root type for which the schema is being generated. // Transformation function to apply after generating a node in the schema.
root reflect.Type fn func(s Schema) Schema
} }
// The $defs block in a JSON schema cannot contain "/", otherwise it will not be // The $defs block in a JSON schema cannot contain "/", otherwise it will not be
@ -41,19 +47,13 @@ type constructor struct {
// //
// For example: // For example:
// {"a/b/c": "value"} is converted to {"a": {"b": {"c": "value"}}} // {"a/b/c": "value"} is converted to {"a": {"b": {"c": "value"}}}
func (c *constructor) Definitions() any { func (c *constructor) nestedDefinitions() any {
defs := maps.Clone(c.definitions) if len(c.definitions) == 0 {
// Remove the root type from the definitions. No need to include it in the
// definitions.
delete(defs, typePath(c.root))
if len(defs) == 0 {
return nil return nil
} }
res := make(map[string]any) res := make(map[string]any)
for k, v := range defs { for k, v := range c.definitions {
parts := strings.Split(k, "/") parts := strings.Split(k, "/")
cur := res cur := res
for i, p := range parts { for i, p := range parts {
@ -72,76 +72,47 @@ func (c *constructor) Definitions() any {
return res return res
} }
// FromType converts a reflect.Type to a jsonschema.Schema. Nodes in the final JSON // TODO: Skip generating schema for interface fields.
// schema are guaranteed to be one level deep, which is done by defining $defs func FromType(typ reflect.Type, fn func(s Schema) Schema) (Schema, error) {
// for every Go type and referring them using $ref in the corresponding node in
// the JSON schema.
//
// fns is a list of transformation functions that will be applied to all $defs
// in the schema.
func FromType(typ reflect.Type, fns []func(typ reflect.Type, s Schema) Schema) (Schema, error) {
c := constructor{ c := constructor{
definitions: make(map[string]Schema), definitions: make(map[string]Schema),
seen: make(map[string]reflect.Type), seen: make(map[string]struct{}),
root: typ, fn: fn,
} }
err := c.walk(typ) err := c.walk(typ)
if err != nil { if err != nil {
return Schema{}, err return InvalidSchema, err
}
for _, fn := range fns {
for k, v := range c.definitions {
c.definitions[k] = fn(c.seen[k], v)
}
} }
res := c.definitions[typePath(typ)] res := c.definitions[typePath(typ)]
res.Definitions = c.Definitions() // No need to include the root type in the definitions.
delete(c.definitions, typePath(typ))
res.Definitions = c.nestedDefinitions()
return res, nil return res, nil
} }
// typePath computes a unique string representation of the type. $ref in the generated
// JSON schema will refer to this path. See TestTypePath for examples outputs.
func typePath(typ reflect.Type) string { func typePath(typ reflect.Type) string {
// Pointers have a typ.Name() of "". Dereference them to get the underlying type. // Pointers have a typ.Name() of "". Dereference them to get the underlying type.
for typ.Kind() == reflect.Ptr { for typ.Kind() == reflect.Ptr {
typ = typ.Elem() typ = typ.Elem()
} }
// typ.Name() resolves to "" for any type.
if typ.Kind() == reflect.Interface { if typ.Kind() == reflect.Interface {
return "interface" return "interface"
} }
// Recursively call typePath, to handle slices of slices / maps. // For built-in types, return the type name directly.
if typ.Kind() == reflect.Slice { if typ.PkgPath() == "" {
return path.Join("slice", typePath(typ.Elem()))
}
if typ.Kind() == reflect.Map {
if typ.Key().Kind() != reflect.String {
panic(fmt.Sprintf("found map with non-string key: %v", typ.Key()))
}
// Recursively call typePath, to handle maps of maps / slices.
return path.Join("map", typePath(typ.Elem()))
}
switch {
case typ.PkgPath() != "" && typ.Name() != "":
return typ.PkgPath() + "." + typ.Name()
case typ.Name() != "":
return typ.Name() return typ.Name()
default:
// Invariant. This function should return a non-empty string
// for all types.
panic("unexpected empty type name for type: " + typ.String())
} }
return strings.Join([]string{typ.PkgPath(), typ.Name()}, ".")
} }
// Walk the Go type, generating $defs for every type encountered, and populating // TODO: would a worked based model fit better here? Is this internal API not
// the corresponding $ref in the JSON schema. // the right fit?
func (c *constructor) walk(typ reflect.Type) error { func (c *constructor) walk(typ reflect.Type) error {
// Dereference pointers if necessary. // Dereference pointers if necessary.
for typ.Kind() == reflect.Ptr { for typ.Kind() == reflect.Ptr {
@ -150,11 +121,10 @@ func (c *constructor) walk(typ reflect.Type) error {
typPath := typePath(typ) typPath := typePath(typ)
// Return early if the type has already been seen, to avoid infinite recursion. // Keep track of seen types to avoid infinite recursion.
if _, ok := c.seen[typPath]; ok { if _, ok := c.seen[typPath]; !ok {
return nil c.seen[typPath] = struct{}{}
} }
c.seen[typPath] = typ
// Return early directly if it's already been processed. // Return early directly if it's already been processed.
if _, ok := c.definitions[typPath]; ok { if _, ok := c.definitions[typPath]; ok {
@ -164,6 +134,7 @@ func (c *constructor) walk(typ reflect.Type) error {
var s Schema var s Schema
var err error var err error
// TODO: Narrow / widen down the number of Go types handled here.
switch typ.Kind() { switch typ.Kind() {
case reflect.Struct: case reflect.Struct:
s, err = c.fromTypeStruct(typ) s, err = c.fromTypeStruct(typ)
@ -171,20 +142,20 @@ func (c *constructor) walk(typ reflect.Type) error {
s, err = c.fromTypeSlice(typ) s, err = c.fromTypeSlice(typ)
case reflect.Map: case reflect.Map:
s, err = c.fromTypeMap(typ) s, err = c.fromTypeMap(typ)
// TODO: Should the primitive functions below be inlined?
case reflect.String: case reflect.String:
s = Schema{Type: StringType} s = Schema{Type: StringType}
case reflect.Bool: case reflect.Bool:
s = Schema{Type: BooleanType} s = Schema{Type: BooleanType}
case reflect.Int, reflect.Int32, reflect.Int64: // TODO: Add comment about reduced coverage of primitive Go types in the code paths here.
case reflect.Int:
s = Schema{Type: IntegerType} s = Schema{Type: IntegerType}
case reflect.Float32, reflect.Float64: case reflect.Float32, reflect.Float64:
s = Schema{Type: NumberType} s = Schema{Type: NumberType}
case reflect.Interface: case reflect.Interface:
// Interface or any types are not serialized to JSON by the default JSON // An interface value can never be serialized from text, and thus is explicitly
// unmarshaller (json.Unmarshal). They are thus likely parsed by the // set to null and disallowed in the schema.
// dynamic configuration tree and we should support arbitrary values here. s = Schema{Type: NullType}
// Eg: variables.default can be anything.
s = Schema{}
default: default:
return fmt.Errorf("unsupported type: %s", typ.Kind()) return fmt.Errorf("unsupported type: %s", typ.Kind())
} }
@ -192,7 +163,13 @@ func (c *constructor) walk(typ reflect.Type) error {
return err return err
} }
// Store the computed JSON schema for the type. if c.fn != nil {
s = c.fn(s)
}
// Store definition for the type if it's part of a Go package and not a built-in type.
// TODO: Apply transformation at the end, to all definitions instead of
// during recursive traversal?
c.definitions[typPath] = s c.definitions[typPath] = s
return nil return nil
} }
@ -229,15 +206,20 @@ func getStructFields(typ reflect.Type) []reflect.StructField {
return fields return fields
} }
// TODO: get rid of the errors here and panic instead?
func (c *constructor) fromTypeStruct(typ reflect.Type) (Schema, error) { func (c *constructor) fromTypeStruct(typ reflect.Type) (Schema, error) {
if typ.Kind() != reflect.Struct { if typ.Kind() != reflect.Struct {
return Schema{}, fmt.Errorf("expected struct, got %s", typ.Kind()) return InvalidSchema, fmt.Errorf("expected struct, got %s", typ.Kind())
} }
res := Schema{ res := Schema{
Type: ObjectType, Type: ObjectType,
Properties: make(map[string]*Schema),
Required: []string{}, Properties: make(map[string]*Schema),
// TODO: Confirm that empty arrays are not serialized.
Required: []string{},
AdditionalProperties: false, AdditionalProperties: false,
} }
@ -258,7 +240,6 @@ func (c *constructor) fromTypeStruct(typ reflect.Type) (Schema, error) {
if jsonTags[0] == "" || jsonTags[0] == "-" || !structField.IsExported() { if jsonTags[0] == "" || jsonTags[0] == "-" || !structField.IsExported() {
continue continue
} }
// "omitempty" tags in the Go SDK structs represent fields that are not // "omitempty" tags in the Go SDK structs represent fields that are not
// required to be present in the API payload. Thus its absence in the // required to be present in the API payload. Thus its absence in the
// tags list indicates that the field is required. // tags list indicates that the field is required.
@ -266,16 +247,19 @@ func (c *constructor) fromTypeStruct(typ reflect.Type) (Schema, error) {
res.Required = append(res.Required, jsonTags[0]) res.Required = append(res.Required, jsonTags[0])
} }
// Walk the fields of the struct.
typPath := typePath(structField.Type) typPath := typePath(structField.Type)
err := c.walk(structField.Type) // Only walk if the type has not been seen yet.
if err != nil { if _, ok := c.seen[typPath]; !ok {
return Schema{}, err // Trigger call to fromType, to recursively generate definitions for
// the struct field.
err := c.walk(structField.Type)
if err != nil {
return InvalidSchema, err
}
} }
// For every property in the struct, add a $ref to the corresponding
// $defs block.
refPath := path.Join("#/$defs", typPath) refPath := path.Join("#/$defs", typPath)
// For non-built-in types, refer to the definition.
res.Properties[jsonTags[0]] = &Schema{ res.Properties[jsonTags[0]] = &Schema{
Reference: &refPath, Reference: &refPath,
} }
@ -284,9 +268,11 @@ func (c *constructor) fromTypeStruct(typ reflect.Type) (Schema, error) {
return res, nil return res, nil
} }
// TODO: Add comments explaining the translation between struct, map, slice and
// the JSON schema representation.
func (c *constructor) fromTypeSlice(typ reflect.Type) (Schema, error) { func (c *constructor) fromTypeSlice(typ reflect.Type) (Schema, error) {
if typ.Kind() != reflect.Slice { if typ.Kind() != reflect.Slice {
return Schema{}, fmt.Errorf("expected slice, got %s", typ.Kind()) return InvalidSchema, fmt.Errorf("expected slice, got %s", typ.Kind())
} }
res := Schema{ res := Schema{
@ -294,16 +280,19 @@ func (c *constructor) fromTypeSlice(typ reflect.Type) (Schema, error) {
} }
typPath := typePath(typ.Elem()) typPath := typePath(typ.Elem())
// Only walk if the type has not been seen yet.
// Walk the slice element type. if _, ok := c.seen[typPath]; !ok {
err := c.walk(typ.Elem()) // Trigger call to fromType, to recursively generate definitions for
if err != nil { // the slice element.
return Schema{}, err err := c.walk(typ.Elem())
if err != nil {
return InvalidSchema, err
}
} }
refPath := path.Join("#/$defs", typPath) refPath := path.Join("#/$defs", typPath)
// Add a $ref to the corresponding $defs block for the slice element type. // For non-built-in types, refer to the definition
res.Items = &Schema{ res.Items = &Schema{
Reference: &refPath, Reference: &refPath,
} }
@ -312,11 +301,11 @@ func (c *constructor) fromTypeSlice(typ reflect.Type) (Schema, error) {
func (c *constructor) fromTypeMap(typ reflect.Type) (Schema, error) { func (c *constructor) fromTypeMap(typ reflect.Type) (Schema, error) {
if typ.Kind() != reflect.Map { if typ.Kind() != reflect.Map {
return Schema{}, fmt.Errorf("expected map, got %s", typ.Kind()) return InvalidSchema, fmt.Errorf("expected map, got %s", typ.Kind())
} }
if typ.Key().Kind() != reflect.String { if typ.Key().Kind() != reflect.String {
return Schema{}, fmt.Errorf("found map with non-string key: %v", typ.Key()) return InvalidSchema, fmt.Errorf("found map with non-string key: %v", typ.Key())
} }
res := Schema{ res := Schema{
@ -324,16 +313,19 @@ func (c *constructor) fromTypeMap(typ reflect.Type) (Schema, error) {
} }
typPath := typePath(typ.Elem()) typPath := typePath(typ.Elem())
// Only walk if the type has not been seen yet.
// Walk the map value type. if _, ok := c.seen[typPath]; !ok {
err := c.walk(typ.Elem()) // Trigger call to fromType, to recursively generate definitions for
if err != nil { // the map value.
return Schema{}, err err := c.walk(typ.Elem())
if err != nil {
return InvalidSchema, err
}
} }
refPath := path.Join("#/$defs", typPath) refPath := path.Join("#/$defs", typPath)
// Add a $ref to the corresponding $defs block for the map value type. // For non-built-in types, refer to the definition
res.AdditionalProperties = &Schema{ res.AdditionalProperties = &Schema{
Reference: &refPath, Reference: &refPath,
} }
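A minimal end-to-end sketch of FromType. The Cluster struct is an arbitrary example type; passing nil for the transform argument compiles against both the old and the new signature shown in this hunk.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"reflect"

	"github.com/databricks/cli/libs/jsonschema"
)

type Cluster struct {
	NumWorkers   int    `json:"num_workers,omitempty"`
	SparkVersion string `json:"spark_version"`
}

func main() {
	// Generate a JSON schema for Cluster. Named types become $defs entries
	// and are referenced via $ref, as described in the constructor above.
	s, err := jsonschema.FromType(reflect.TypeOf(Cluster{}), nil)
	if err != nil {
		log.Fatal(err)
	}
	out, err := json.MarshalIndent(s, "", "  ")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}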


@ -67,7 +67,9 @@ func TestFromTypeBasic(t *testing.T) {
expected: Schema{ expected: Schema{
Type: "object", Type: "object",
Definitions: map[string]any{ Definitions: map[string]any{
"interface": Schema{}, "interface": Schema{
Type: "null",
},
"string": Schema{ "string": Schema{
Type: "string", Type: "string",
}, },
@ -127,6 +129,15 @@ func TestFromTypeBasic(t *testing.T) {
s, err := FromType(tc.typ, nil) s, err := FromType(tc.typ, nil)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, tc.expected, s) assert.Equal(t, tc.expected, s)
// jsonSchema, err := json.MarshalIndent(s, " ", " ")
// assert.NoError(t, err)
// expectedJson, err := json.MarshalIndent(tc.expected, " ", " ")
// assert.NoError(t, err)
// t.Log("[DEBUG] actual: ", string(jsonSchema))
// t.Log("[DEBUG] expected: ", string(expectedJson))
}) })
} }
} }
@ -211,7 +222,7 @@ func TestFromTypeNested(t *testing.T) {
}, },
{ {
name: "struct as a map value", name: "struct as a map value",
typ: reflect.TypeOf(map[string]*Inner{}), typ: reflect.TypeOf(map[string]Inner{}),
expected: Schema{ expected: Schema{
Type: "object", Type: "object",
Definitions: expectedDefinitions, Definitions: expectedDefinitions,
@ -241,6 +252,7 @@ func TestFromTypeNested(t *testing.T) {
} }
} }
// TODO: Call out in the PR description that recursive Go types are supported.
func TestFromTypeRecursive(t *testing.T) { func TestFromTypeRecursive(t *testing.T) {
fooRef := "#/$defs/github.com/databricks/cli/libs/jsonschema/test_types.Foo" fooRef := "#/$defs/github.com/databricks/cli/libs/jsonschema/test_types.Foo"
barRef := "#/$defs/github.com/databricks/cli/libs/jsonschema/test_types.Bar" barRef := "#/$defs/github.com/databricks/cli/libs/jsonschema/test_types.Bar"
@ -342,122 +354,7 @@ func TestFromTypeSelfReferential(t *testing.T) {
} }
func TestFromTypeError(t *testing.T) { func TestFromTypeError(t *testing.T) {
// Maps with non-string keys should panic.
type mapOfInts map[int]int type mapOfInts map[int]int
assert.PanicsWithValue(t, "found map with non-string key: int", func() { _, err := FromType(reflect.TypeOf(mapOfInts{}), nil)
FromType(reflect.TypeOf(mapOfInts{}), nil) assert.EqualError(t, err, "found map with non-string key: int")
})
// Unsupported types should return an error.
_, err := FromType(reflect.TypeOf(complex64(0)), nil)
assert.EqualError(t, err, "unsupported type: complex64")
}
func TestFromTypeFunctionsArg(t *testing.T) {
type myStruct struct {
S string `json:"s"`
}
strRef := "#/$defs/string"
expected := Schema{
Type: "object",
Definitions: map[string]any{
"string": Schema{
Type: "string",
Description: "a string",
Enum: []any{"a", "b", "c"},
},
},
Properties: map[string]*Schema{
"s": {
Reference: &strRef,
},
},
AdditionalProperties: false,
Required: []string{"s"},
}
addDescription := func(typ reflect.Type, s Schema) Schema {
if typ.Kind() != reflect.String {
return s
}
s.Description = "a string"
return s
}
addEnums := func(typ reflect.Type, s Schema) Schema {
if typ.Kind() != reflect.String {
return s
}
s.Enum = []any{"a", "b", "c"}
return s
}
s, err := FromType(reflect.TypeOf(myStruct{}), []func(reflect.Type, Schema) Schema{
addDescription,
addEnums,
})
assert.NoError(t, err)
assert.Equal(t, expected, s)
}
func TestTypePath(t *testing.T) {
type myStruct struct{}
tcases := []struct {
typ reflect.Type
path string
}{
{
typ: reflect.TypeOf(""),
path: "string",
},
{
typ: reflect.TypeOf(int(0)),
path: "int",
},
{
typ: reflect.TypeOf(true),
path: "bool",
},
{
typ: reflect.TypeOf(float64(0)),
path: "float64",
},
{
typ: reflect.TypeOf(myStruct{}),
path: "github.com/databricks/cli/libs/jsonschema.myStruct",
},
{
typ: reflect.TypeOf([]int{}),
path: "slice/int",
},
{
typ: reflect.TypeOf(map[string]int{}),
path: "map/int",
},
{
typ: reflect.TypeOf([]myStruct{}),
path: "slice/github.com/databricks/cli/libs/jsonschema.myStruct",
},
{
typ: reflect.TypeOf([][]map[string]map[string]myStruct{}),
path: "slice/slice/map/map/github.com/databricks/cli/libs/jsonschema.myStruct",
},
{
typ: reflect.TypeOf(map[string]myStruct{}),
path: "map/github.com/databricks/cli/libs/jsonschema.myStruct",
},
}
for _, tc := range tcases {
t.Run(tc.typ.String(), func(t *testing.T) {
assert.Equal(t, tc.path, typePath(tc.typ))
})
}
// Maps with non-string keys should panic.
assert.PanicsWithValue(t, "found map with non-string key: int", func() {
typePath(reflect.TypeOf(map[int]int{}))
})
} }