Generate equivalent Go types from Terraform provider schema (#122)

It contains:
* `codegen` -- this turns the schema of the Databricks Terraform provider into Go types.
* `schema` -- the output of the above.
This commit is contained in:
Pieter Noordhuis 2022-12-06 16:26:19 +01:00 committed by GitHub
parent d9d295f2a9
commit ff89c9d06f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
115 changed files with 4463 additions and 0 deletions

2
bundle/internal/tf/codegen/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
/codegen
/tmp

View File

@ -0,0 +1,15 @@
Use this tool to generate equivalent Go types from Terraform provider schema.
## Usage
The entry point for this tool is `.`.
It uses `./tmp` as a temporary data directory and `../schema` as the output directory.
It automatically installs the Terraform binary as well as the Databricks Terraform provider.
Run with:
```sh
go run .
```

View File

@ -0,0 +1,109 @@
package generator
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"text/template"
tfjson "github.com/hashicorp/terraform-json"
)
// normalizeName strips the "databricks_" provider prefix from a
// Terraform resource or data source name, if present.
func normalizeName(name string) string {
	const prefix = "databricks_"
	if strings.HasPrefix(name, prefix) {
		return name[len(prefix):]
	}
	return name
}
// collection describes an aggregation file (e.g. resources.go or
// data_sources.go) that lists a set of generated block types.
type collection struct {
	// OutputFile is both the output filename and the basename of the
	// template used to render it (./templates/<OutputFile>.tmpl).
	OutputFile string

	// Blocks holds the named blocks to include in the rendered file.
	Blocks []*namedBlock
}
// Generate renders this collection's template and writes the result
// to a file named OutputFile in the given directory.
func (c *collection) Generate(path string) error {
	templateFile := fmt.Sprintf("./templates/%s.tmpl", c.OutputFile)
	tmpl := template.Must(template.ParseFiles(templateFile))

	out, err := os.Create(filepath.Join(path, c.OutputFile))
	if err != nil {
		return err
	}
	defer out.Close()

	return tmpl.Execute(out, c)
}
// Run generates Go types for every resource, every data source, and the
// provider configuration block in the given provider schema, writing the
// resulting files to the given output directory.
func Run(ctx context.Context, schema *tfjson.ProviderSchema, path string) error {
	// Generate types for resources.
	resources, err := generateNamedBlocks(path, schema.ResourceSchemas, "resource_%s.go", "Resource")
	if err != nil {
		return err
	}

	// Generate types for data sources.
	dataSources, err := generateNamedBlocks(path, schema.DataSourceSchemas, "data_source_%s.go", "DataSource")
	if err != nil {
		return err
	}

	// Generate type for provider configuration.
	config := &namedBlock{
		filePattern:    "%s.go",
		typeNamePrefix: "",
		name:           "config",
		block:          schema.ConfigSchema.Block,
	}
	if err := config.Generate(path); err != nil {
		return err
	}

	// Generate the aggregation files (resources.go and data_sources.go).
	for _, c := range []*collection{
		{OutputFile: "resources.go", Blocks: resources},
		{OutputFile: "data_sources.go", Blocks: dataSources},
	} {
		if err := c.Generate(path); err != nil {
			return err
		}
	}

	return nil
}

// generateNamedBlocks generates one Go type file per schema in the map and
// returns the corresponding blocks. Keys are processed in sorted order so
// output is deterministic.
func generateNamedBlocks(path string, schemas map[string]*tfjson.Schema, filePattern, typeNamePrefix string) ([]*namedBlock, error) {
	var blocks []*namedBlock
	for _, k := range sortKeys(schemas) {
		b := &namedBlock{
			filePattern:    filePattern,
			typeNamePrefix: typeNamePrefix,
			name:           k,
			block:          schemas[k].Block,
		}
		if err := b.Generate(path); err != nil {
			return nil, err
		}
		blocks = append(blocks, b)
	}
	return blocks, nil
}

View File

@ -0,0 +1,60 @@
package generator
import (
"fmt"
"os"
"path/filepath"
"strings"
"text/template"
tfjson "github.com/hashicorp/terraform-json"
"github.com/iancoleman/strcase"
)
// namedBlock pairs a Terraform block schema with the naming metadata
// needed to generate a Go type (and file) for it.
type namedBlock struct {
	// filePattern is a fmt pattern with a single %s placeholder for the
	// normalized block name (e.g. "resource_%s.go").
	filePattern string

	// typeNamePrefix is prepended to the camel-cased block name to form
	// the Go type name (e.g. "Resource" or "DataSource").
	typeNamePrefix string

	// name is the Terraform name of the block (e.g. "databricks_job").
	name string

	// block is the Terraform schema for this block.
	block *tfjson.SchemaBlock
}

// FieldName returns the Go field name to use when referring to this block.
func (b *namedBlock) FieldName() string {
	return b.camelName()
}

// TypeBase returns the components of this block's Go type name.
func (b *namedBlock) TypeBase() []string {
	return []string{b.typeNamePrefix, b.camelName()}
}

// TypeName returns this block's Go type name (prefix + camel-cased name).
func (b *namedBlock) TypeName() string {
	return strings.Join(b.TypeBase(), "")
}

// TerraformName returns the block's original Terraform name.
func (b *namedBlock) TerraformName() string {
	return b.name
}

// normalizedName returns the block name without the "databricks_" prefix.
func (b *namedBlock) normalizedName() string {
	return normalizeName(b.name)
}

// camelName returns the normalized block name converted to CamelCase.
func (b *namedBlock) camelName() string {
	return strcase.ToCamel(b.normalizedName())
}
// Generate walks this block's schema to collect the struct types to
// declare, renders them through the block template, and writes the
// result to a file (named per filePattern) in the given directory.
func (b *namedBlock) Generate(path string) error {
	w, err := walk(b.block, []string{b.typeNamePrefix, b.camelName()})
	if err != nil {
		return err
	}

	// Parse the template before creating the output file: template.Must
	// panics on a parse error, and creating the file first would leave an
	// empty file behind in that case.
	tmpl := template.Must(template.ParseFiles("./templates/block.go.tmpl"))

	f, err := os.Create(filepath.Join(path, fmt.Sprintf(b.filePattern, b.normalizedName())))
	if err != nil {
		return err
	}
	defer f.Close()

	return tmpl.Execute(f, w)
}

View File

@ -0,0 +1,13 @@
package generator
import (
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
)
// sortKeys returns the keys of the specified map as a new slice,
// sorted in ascending order.
func sortKeys[M ~map[K]V, K string, V any](m M) []K {
	sorted := maps.Keys(m)
	slices.Sort(sorted)
	return sorted
}

View File

@ -0,0 +1,150 @@
package generator
import (
"fmt"
"strings"
tfjson "github.com/hashicorp/terraform-json"
"github.com/iancoleman/strcase"
"github.com/zclconf/go-cty/cty"
"golang.org/x/exp/slices"
)
// field describes a single field of a generated Go struct.
type field struct {
	Name string // Go field name (CamelCase)
	Type string // Go type as a string (e.g. "string", "[]int", "*Foo")
	Tag  string // JSON struct tag value (e.g. "bucket,omitempty")
}

// structType describes a single generated Go struct type.
type structType struct {
	Name   string
	Fields []field
}

// walker represents the set of types to declare to
// represent a [tfjson.SchemaBlock] as Go structs.
// See the [walk] function for usage.
type walker struct {
	// StructTypes is ordered such that nested types appear before the
	// types that reference them.
	StructTypes []structType
}
// processAttributeType returns the Go type (as a string) that represents
// the given cty attribute type. It panics on types it cannot translate.
func processAttributeType(typ cty.Type) string {
	switch {
	case typ.Equals(cty.Bool):
		return "bool"
	case typ.Equals(cty.Number):
		return "int"
	case typ.Equals(cty.String):
		return "string"
	case typ.IsMapType():
		return "map[string]" + processAttributeType(*typ.MapElementType())
	case typ.IsSetType():
		return "[]" + processAttributeType(*typ.SetElementType())
	case typ.IsListType():
		return "[]" + processAttributeType(*typ.ListElementType())
	case typ.IsObjectType():
		return "any"
	default:
		panic("No idea what to do for: " + typ.FriendlyName())
	}
}
// nestedBlockKeys returns the sorted names of the block's nested blocks,
// excluding the Terraform-specific "timeouts" block.
func nestedBlockKeys(block *tfjson.SchemaBlock) []string {
	keys := sortKeys(block.NestedBlocks)
	// Remove TF specific "timeouts" block.
	if i := slices.Index(keys, "timeouts"); i >= 0 {
		keys = slices.Delete(keys, i, i+1)
	}
	return keys
}
// walk appends to w.StructTypes the struct types needed to represent the
// given block and, recursively, all of its nested blocks. The name slice
// holds the type name components accumulated so far; they are joined
// without a separator to form the Go type name.
func (w *walker) walk(block *tfjson.SchemaBlock, name []string) error {
	// Produce nested types before this block itself.
	// This ensures types are defined before they are referenced.
	for _, k := range nestedBlockKeys(block) {
		v := block.NestedBlocks[k]
		// Copy the name components before appending: repeated appends to
		// the caller's slice may otherwise share a backing array across
		// iterations and recursive calls (slice aliasing).
		childName := append(append([]string{}, name...), strcase.ToCamel(k))
		err := w.walk(v.Block, childName)
		if err != nil {
			return err
		}
	}

	// Declare type.
	typ := structType{
		Name: strings.Join(name, ""),
	}

	// Declare attributes.
	for _, k := range sortKeys(block.Attributes) {
		v := block.Attributes[k]

		// Assert the attribute type is always set.
		if v.AttributeType == cty.NilType {
			return fmt.Errorf("unexpected nil type for attribute %s", k)
		}

		// Collect field properties.
		fieldName := strcase.ToCamel(k)
		fieldType := processAttributeType(v.AttributeType)
		fieldTag := k
		if v.Required && v.Optional {
			return fmt.Errorf("both required and optional are set for attribute %s", k)
		}
		// Anything not explicitly required is omitted from JSON when empty.
		if !v.Required {
			fieldTag = fmt.Sprintf("%s,omitempty", fieldTag)
		}

		// Append to list of fields for type.
		typ.Fields = append(typ.Fields, field{
			Name: fieldName,
			Type: fieldType,
			Tag:  fieldTag,
		})
	}

	// Declare nested blocks.
	for _, k := range nestedBlockKeys(block) {
		v := block.NestedBlocks[k]

		// Collect field properties. A block limited to a single instance
		// maps to a pointer; otherwise it maps to a slice.
		fieldName := strcase.ToCamel(k)
		fieldTypePrefix := "[]"
		if v.MaxItems == 1 {
			fieldTypePrefix = "*"
		}
		// Build the nested type name by concatenation instead of appending
		// to `name`, to avoid mutating a potentially shared backing array.
		fieldType := fieldTypePrefix + strings.Join(name, "") + strcase.ToCamel(k)
		fieldTag := fmt.Sprintf("%s,omitempty", k)

		// Append to list of fields for type.
		typ.Fields = append(typ.Fields, field{
			Name: fieldName,
			Type: fieldType,
			Tag:  fieldTag,
		})
	}

	// Append type to list of structs.
	w.StructTypes = append(w.StructTypes, typ)
	return nil
}
// walk recursively traverses [tfjson.SchemaBlock] and returns the
// set of types to declare to represent it as Go structs.
func walk(block *tfjson.SchemaBlock, name []string) (*walker, error) {
	w := new(walker)
	if err := w.walk(block, name); err != nil {
		return w, err
	}
	return w, nil
}

View File

@ -0,0 +1,20 @@
module github.com/databricks/bricks/bundle/internal/tf/codegen
go 1.18
require github.com/hashicorp/terraform-json v0.14.0
require (
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e // indirect
)
require (
github.com/hashicorp/go-version v1.6.0 // indirect
github.com/hashicorp/hc-install v0.4.0
github.com/hashicorp/terraform-exec v0.17.3
github.com/iancoleman/strcase v0.2.0
github.com/zclconf/go-cty v1.11.0
golang.org/x/exp v0.0.0-20221204150635-6dcec336b2bb
golang.org/x/text v0.3.7 // indirect
)

View File

@ -0,0 +1,142 @@
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
github.com/Microsoft/go-winio v0.4.16 h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk=
github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=
github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 h1:YoJbenK9C67SkzkDfmQuVln04ygHj3vjZfd9FL+GmQQ=
github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7/go.mod h1:z4/9nQmJSSwwds7ejkxaJwO37dru3geImFUdJlaLzQo=
github.com/acomagu/bufpipe v1.0.3 h1:fxAGrHZTgQ9w5QqVItgzwj235/uYZYgbXitB+dLupOk=
github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk=
github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
github.com/go-git/go-billy/v5 v5.2.0/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
github.com/go-git/go-billy/v5 v5.3.1 h1:CPiOUAzKtMRvolEKw+bG1PLRpT7D3LIs3/3ey4Aiu34=
github.com/go-git/go-billy/v5 v5.3.1/go.mod h1:pmpqyWchKfYfrkb/UVH4otLvyi/5gJlGI4Hb3ZqZ3W0=
github.com/go-git/go-git-fixtures/v4 v4.2.1/go.mod h1:K8zd3kDUAykwTdDCr+I0per6Y6vMiRR/nnVTBtavnB0=
github.com/go-git/go-git/v5 v5.4.2 h1:BXyZu9t0VkbiHtqrsvdq39UDhGJTl1h55VW6CSC4aY4=
github.com/go-git/go-git/v5 v5.4.2/go.mod h1:gQ1kArt6d+n+BGd+/B/I74HwRTLhth2+zti4ihgckDc=
github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg=
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-version v1.5.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek=
github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/hc-install v0.4.0 h1:cZkRFr1WVa0Ty6x5fTvL1TuO1flul231rWkGH92oYYk=
github.com/hashicorp/hc-install v0.4.0/go.mod h1:5d155H8EC5ewegao9A4PUTMNPZaq+TbOzkJJZ4vrXeI=
github.com/hashicorp/terraform-exec v0.17.3 h1:MX14Kvnka/oWGmIkyuyvL6POx25ZmKrjlaclkx3eErU=
github.com/hashicorp/terraform-exec v0.17.3/go.mod h1:+NELG0EqQekJzhvikkeQsOAZpsw0cv/03rbeQJqscAI=
github.com/hashicorp/terraform-json v0.14.0 h1:sh9iZ1Y8IFJLx+xQiKHGud6/TSUCM0N8e17dKDpqV7s=
github.com/hashicorp/terraform-json v0.14.0/go.mod h1:5A9HIWPkk4e5aeeXIBbkcOvaZbIYnAIkEyqP2pNSckM=
github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0=
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=
github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sebdah/goldie v1.0.0/go.mod h1:jXP4hmWywNEwZzhMuv2ccnqTSFpuq8iyQhtQdkkZBH4=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4=
github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
github.com/xanzy/ssh-agent v0.3.0 h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=
github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6eAcqrDh0=
github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8=
github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk=
github.com/zclconf/go-cty v1.11.0 h1:726SxLdi2SDnjY+BStqB9J1hNp4+2WlzyXLuimibIe0=
github.com/zclconf/go-cty v1.11.0/go.mod h1:s9IfD1LK5ccNMSWCVFCE2rJfHiZgi7JijgeWIMfhLvA=
github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8=
golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e h1:gsTQYXdTw2Gq7RBsWvlQ91b+aEQ6bXFUngBGuR8sPpI=
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20221204150635-6dcec336b2bb h1:QIsP/NmClBICkqnJ4rSIhnrGiGR7Yv9ZORGGnmmLTPk=
golang.org/x/exp v0.0.0-20221204150635-6dcec336b2bb/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210326060303-6b1517762897 h1:KrsHThm5nFk34YtATK1LsThyGhGbGe1olrte/HInHvs=
golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210502180810-71e4cd670f79/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0 h1:kunALQeHf1/185U1i0GOB/fy1IPRDDpuoOOqRReG57U=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@ -0,0 +1,23 @@
package main
import (
"context"
"log"
"github.com/databricks/bricks/bundle/internal/tf/codegen/generator"
"github.com/databricks/bricks/bundle/internal/tf/codegen/schema"
)
// main loads the Databricks Terraform provider schema and generates the
// equivalent Go types into the ../schema directory.
func main() {
	ctx := context.Background()

	// Name the result providerSchema to avoid shadowing the schema package.
	providerSchema, err := schema.Load(ctx)
	if err != nil {
		log.Fatal(err)
	}

	if err := generator.Run(ctx, providerSchema, "../schema"); err != nil {
		log.Fatal(err)
	}
}

View File

@ -0,0 +1,105 @@
package schema
import (
"context"
"encoding/json"
"fmt"
"log"
"os"
"path/filepath"
"github.com/hashicorp/hc-install/product"
"github.com/hashicorp/hc-install/releases"
"github.com/hashicorp/terraform-exec/tfexec"
)
// writeTerraformBlock writes a main.tf.json to the working directory that
// declares the Databricks provider as a required provider, so that
// `terraform init` will download it.
func (s *Schema) writeTerraformBlock(_ context.Context) error {
	body := map[string]any{
		"terraform": map[string]any{
			"required_providers": map[string]any{
				"databricks": map[string]any{
					"source":  "databricks/databricks",
					"version": ">= 1.0.0",
				},
			},
		},
	}

	buf, err := json.MarshalIndent(body, "", " ")
	if err != nil {
		return err
	}

	return os.WriteFile(filepath.Join(s.WorkingDir, "main.tf.json"), buf, 0644)
}
// installTerraform downloads the latest Terraform release into
// <WorkingDir>/bin and returns the path to the installed binary.
func (s *Schema) installTerraform(ctx context.Context) (string, error) {
	installDir := filepath.Join(s.WorkingDir, "bin")
	if err := os.MkdirAll(installDir, 0755); err != nil {
		return "", err
	}

	installer := &releases.LatestVersion{
		InstallDir: installDir,
		Product:    product.Terraform,
	}
	installer.SetLogger(log.Default())

	return installer.Install(ctx)
}
// generateSchema runs `terraform init` in the working directory, asks
// Terraform for the provider schemas, and writes them as indented JSON
// to s.ProviderSchemaFile. It fails if the output does not include the
// Databricks provider.
func (s *Schema) generateSchema(ctx context.Context, execPath string) error {
	tf, err := tfexec.NewTerraform(s.WorkingDir, execPath)
	if err != nil {
		return err
	}
	log.Printf("running `terraform init`")
	// Upgrade(true) allows init to pick the newest acceptable provider version.
	err = tf.Init(ctx, tfexec.Upgrade(true))
	if err != nil {
		return err
	}
	log.Printf("acquiring provider schema")
	schemas, err := tf.ProvidersSchema(ctx)
	if err != nil {
		return err
	}
	// Find the databricks provider definition.
	_, ok := schemas.Schemas[DatabricksProvider]
	if !ok {
		return fmt.Errorf("schema file doesn't include schema for %s", DatabricksProvider)
	}
	buf, err := json.MarshalIndent(schemas, "", " ")
	if err != nil {
		return err
	}
	return os.WriteFile(s.ProviderSchemaFile, buf, 0644)
}
// Generate produces the provider schema file in the working directory:
// it writes the Terraform configuration, installs Terraform, and then
// extracts the provider schema with it.
func (s *Schema) Generate(ctx context.Context) error {
	if err := s.writeTerraformBlock(ctx); err != nil {
		return err
	}

	execPath, err := s.installTerraform(ctx)
	if err != nil {
		return err
	}

	return s.generateSchema(ctx, execPath)
}

View File

@ -0,0 +1,36 @@
package schema
import (
"context"
"encoding/json"
"fmt"
"os"
tfjson "github.com/hashicorp/terraform-json"
)
// Load reads the provider schema file from disk, validates it, and
// returns the schema for the Databricks provider.
func (s *Schema) Load(ctx context.Context) (*tfjson.ProviderSchema, error) {
	buf, err := os.ReadFile(s.ProviderSchemaFile)
	if err != nil {
		return nil, err
	}

	var document tfjson.ProviderSchemas
	if err := json.Unmarshal(buf, &document); err != nil {
		return nil, err
	}
	if err := document.Validate(); err != nil {
		return nil, fmt.Errorf("invalid schema: %w", err)
	}

	// Find the databricks provider definition.
	schema, ok := document.Schemas[DatabricksProvider]
	if !ok {
		return nil, fmt.Errorf("schema file doesn't include schema for %s", DatabricksProvider)
	}
	return schema, nil
}

View File

@ -0,0 +1,52 @@
package schema
import (
"context"
"os"
"path/filepath"
tfjson "github.com/hashicorp/terraform-json"
)
// DatabricksProvider is the address of the Databricks Terraform provider
// in the public Terraform registry.
const DatabricksProvider = "registry.terraform.io/databricks/databricks"

// Schema holds the paths used while generating the provider schema file.
type Schema struct {
	// WorkingDir is the directory Terraform runs in.
	WorkingDir string

	// ProviderSchemaFile is the path of the resulting schema JSON file.
	ProviderSchemaFile string
}

// New creates the ./tmp working directory (relative to the current
// working directory) and returns a Schema configured to use it.
func New() (*Schema, error) {
	wd, err := os.Getwd()
	if err != nil {
		return nil, err
	}

	tmpdir := filepath.Join(wd, "./tmp")
	if err := os.MkdirAll(tmpdir, 0755); err != nil {
		return nil, err
	}

	return &Schema{
		WorkingDir:         tmpdir,
		ProviderSchemaFile: filepath.Join(tmpdir, "provider.json"),
	}, nil
}
// Load returns the Databricks provider schema, generating the schema
// file first if it is not already present on disk.
func Load(ctx context.Context) (*tfjson.ProviderSchema, error) {
	s, err := New()
	if err != nil {
		return nil, err
	}

	// Generate schema file if it doesn't exist.
	if _, err := os.Stat(s.ProviderSchemaFile); os.IsNotExist(err) {
		if err := s.Generate(ctx); err != nil {
			return nil, err
		}
	}

	return s.Load(ctx)
}

View File

@ -0,0 +1,11 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
{{ range .StructTypes }}
type {{ .Name }} struct {
{{- range .Fields }}
{{ .Name }} {{ .Type }} `json:"{{ .Tag }}"`
{{- end }}
}
{{ end }}

View File

@ -0,0 +1,17 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSources struct {
{{- range .Blocks }}
{{ .FieldName }} map[string]*{{ .TypeName }} `json:"{{ .TerraformName }},omitempty"`
{{- end }}
}
func NewDataSources() *DataSources {
return &DataSources{
{{- range .Blocks }}
{{ .FieldName }}: make(map[string]*{{ .TypeName }}),
{{- end }}
}
}

View File

@ -0,0 +1,17 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type Resources struct {
{{- range .Blocks }}
{{ .FieldName }} map[string]*{{ .TypeName }} `json:"{{ .TerraformName }},omitempty"`
{{- end }}
}
func NewResources() *Resources {
return &Resources{
{{- range .Blocks }}
{{ .FieldName }}: make(map[string]*{{ .TypeName }}),
{{- end }}
}
}

View File

@ -0,0 +1,31 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type Config struct {
AccountId string `json:"account_id,omitempty"`
AuthType string `json:"auth_type,omitempty"`
AzureClientId string `json:"azure_client_id,omitempty"`
AzureClientSecret string `json:"azure_client_secret,omitempty"`
AzureEnvironment string `json:"azure_environment,omitempty"`
AzureLoginAppId string `json:"azure_login_app_id,omitempty"`
AzureTenantId string `json:"azure_tenant_id,omitempty"`
AzureUseMsi bool `json:"azure_use_msi,omitempty"`
AzureWorkspaceResourceId string `json:"azure_workspace_resource_id,omitempty"`
ClientId string `json:"client_id,omitempty"`
ClientSecret string `json:"client_secret,omitempty"`
ConfigFile string `json:"config_file,omitempty"`
DebugHeaders bool `json:"debug_headers,omitempty"`
DebugTruncateBytes int `json:"debug_truncate_bytes,omitempty"`
GoogleCredentials string `json:"google_credentials,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
Host string `json:"host,omitempty"`
HttpTimeoutSeconds int `json:"http_timeout_seconds,omitempty"`
Password string `json:"password,omitempty"`
Profile string `json:"profile,omitempty"`
RateLimit int `json:"rate_limit,omitempty"`
SkipVerify bool `json:"skip_verify,omitempty"`
Token string `json:"token,omitempty"`
TokenEndpoint string `json:"token_endpoint,omitempty"`
Username string `json:"username,omitempty"`
}

View File

@ -0,0 +1,11 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceAwsAssumeRolePolicy struct {
DatabricksAccountId string `json:"databricks_account_id,omitempty"`
ExternalId string `json:"external_id"`
ForLogDelivery bool `json:"for_log_delivery,omitempty"`
Id string `json:"id,omitempty"`
Json string `json:"json,omitempty"`
}

View File

@ -0,0 +1,12 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceAwsBucketPolicy struct {
Bucket string `json:"bucket"`
DatabricksAccountId string `json:"databricks_account_id,omitempty"`
DatabricksE2AccountId string `json:"databricks_e2_account_id,omitempty"`
FullAccessRole string `json:"full_access_role,omitempty"`
Id string `json:"id,omitempty"`
Json string `json:"json,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceAwsCrossaccountPolicy struct {
Id string `json:"id,omitempty"`
Json string `json:"json,omitempty"`
PassRoles []string `json:"pass_roles,omitempty"`
}

View File

@ -0,0 +1,8 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceCatalogs struct {
Id string `json:"id,omitempty"`
Ids []string `json:"ids,omitempty"`
}

View File

@ -0,0 +1,172 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceClusterClusterInfoAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
}
// DataSourceClusterClusterInfoAwsAttributes corresponds to the aws_attributes
// block of cluster_info in the databricks_cluster data source schema.
type DataSourceClusterClusterInfoAwsAttributes struct {
Availability string `json:"availability,omitempty"`
EbsVolumeCount int `json:"ebs_volume_count,omitempty"`
EbsVolumeSize int `json:"ebs_volume_size,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

// DataSourceClusterClusterInfoAzureAttributes corresponds to the
// azure_attributes block of cluster_info.
type DataSourceClusterClusterInfoAzureAttributes struct {
Availability string `json:"availability,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}

// DataSourceClusterClusterInfoClusterLogConfDbfs corresponds to the dbfs
// block of cluster_log_conf.
type DataSourceClusterClusterInfoClusterLogConfDbfs struct {
Destination string `json:"destination"`
}

// DataSourceClusterClusterInfoClusterLogConfS3 corresponds to the s3 block
// of cluster_log_conf.
type DataSourceClusterClusterInfoClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}

// DataSourceClusterClusterInfoClusterLogConf corresponds to the
// cluster_log_conf block of cluster_info (one of dbfs or s3).
type DataSourceClusterClusterInfoClusterLogConf struct {
Dbfs *DataSourceClusterClusterInfoClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *DataSourceClusterClusterInfoClusterLogConfS3 `json:"s3,omitempty"`
}

// DataSourceClusterClusterInfoClusterLogStatus corresponds to the
// cluster_log_status block of cluster_info.
type DataSourceClusterClusterInfoClusterLogStatus struct {
LastAttempted int `json:"last_attempted,omitempty"`
LastException string `json:"last_exception,omitempty"`
}

// DataSourceClusterClusterInfoDockerImageBasicAuth corresponds to the
// basic_auth block of docker_image.
type DataSourceClusterClusterInfoDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}

// DataSourceClusterClusterInfoDockerImage corresponds to the docker_image
// block of cluster_info.
type DataSourceClusterClusterInfoDockerImage struct {
Url string `json:"url"`
BasicAuth *DataSourceClusterClusterInfoDockerImageBasicAuth `json:"basic_auth,omitempty"`
}

// DataSourceClusterClusterInfoDriverNodeAwsAttributes corresponds to the
// node_aws_attributes block of the driver node.
type DataSourceClusterClusterInfoDriverNodeAwsAttributes struct {
IsSpot bool `json:"is_spot,omitempty"`
}

// DataSourceClusterClusterInfoDriver corresponds to the driver block of
// cluster_info.
type DataSourceClusterClusterInfoDriver struct {
HostPrivateIp string `json:"host_private_ip,omitempty"`
InstanceId string `json:"instance_id,omitempty"`
NodeId string `json:"node_id,omitempty"`
PrivateIp string `json:"private_ip,omitempty"`
PublicDns string `json:"public_dns,omitempty"`
StartTimestamp int `json:"start_timestamp,omitempty"`
NodeAwsAttributes *DataSourceClusterClusterInfoDriverNodeAwsAttributes `json:"node_aws_attributes,omitempty"`
}

// DataSourceClusterClusterInfoExecutorsNodeAwsAttributes corresponds to the
// node_aws_attributes block of an executor node.
type DataSourceClusterClusterInfoExecutorsNodeAwsAttributes struct {
IsSpot bool `json:"is_spot,omitempty"`
}

// DataSourceClusterClusterInfoExecutors corresponds to one element of the
// executors list in cluster_info.
type DataSourceClusterClusterInfoExecutors struct {
HostPrivateIp string `json:"host_private_ip,omitempty"`
InstanceId string `json:"instance_id,omitempty"`
NodeId string `json:"node_id,omitempty"`
PrivateIp string `json:"private_ip,omitempty"`
PublicDns string `json:"public_dns,omitempty"`
StartTimestamp int `json:"start_timestamp,omitempty"`
NodeAwsAttributes *DataSourceClusterClusterInfoExecutorsNodeAwsAttributes `json:"node_aws_attributes,omitempty"`
}

// DataSourceClusterClusterInfoGcpAttributes corresponds to the gcp_attributes
// block of cluster_info.
type DataSourceClusterClusterInfoGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

// DataSourceClusterClusterInfoInitScriptsDbfs corresponds to the dbfs block
// of an init_scripts entry.
type DataSourceClusterClusterInfoInitScriptsDbfs struct {
Destination string `json:"destination"`
}

// DataSourceClusterClusterInfoInitScriptsS3 corresponds to the s3 block of
// an init_scripts entry.
type DataSourceClusterClusterInfoInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}

// DataSourceClusterClusterInfoInitScripts corresponds to one element of the
// init_scripts list in cluster_info (one of dbfs or s3).
type DataSourceClusterClusterInfoInitScripts struct {
Dbfs *DataSourceClusterClusterInfoInitScriptsDbfs `json:"dbfs,omitempty"`
S3 *DataSourceClusterClusterInfoInitScriptsS3 `json:"s3,omitempty"`
}

// DataSourceClusterClusterInfoTerminationReason corresponds to the
// termination_reason block of cluster_info.
type DataSourceClusterClusterInfoTerminationReason struct {
Code string `json:"code,omitempty"`
Parameters map[string]string `json:"parameters,omitempty"`
Type string `json:"type,omitempty"`
}
// DataSourceClusterClusterInfo corresponds to the cluster_info block of the
// databricks_cluster data source. Scalar attributes are listed first, then
// nested blocks, mirroring the generator's output order.
// NOTE(review): fields without omitempty (default_tags, spark_version, state)
// appear to be required/computed in the provider schema — confirm against the
// generator if this matters for serialization.
type DataSourceClusterClusterInfo struct {
AutoterminationMinutes int `json:"autotermination_minutes,omitempty"`
ClusterCores int `json:"cluster_cores,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterMemoryMb int `json:"cluster_memory_mb,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
ClusterSource string `json:"cluster_source,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DataSecurityMode string `json:"data_security_mode,omitempty"`
DefaultTags map[string]string `json:"default_tags"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
JdbcPort int `json:"jdbc_port,omitempty"`
LastActivityTime int `json:"last_activity_time,omitempty"`
LastStateLossTime int `json:"last_state_loss_time,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers,omitempty"`
PolicyId string `json:"policy_id,omitempty"`
RuntimeEngine string `json:"runtime_engine,omitempty"`
SingleUserName string `json:"single_user_name,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkContextId int `json:"spark_context_id,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SparkVersion string `json:"spark_version"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
StartTime int `json:"start_time,omitempty"`
State string `json:"state"`
StateMessage string `json:"state_message,omitempty"`
TerminateTime int `json:"terminate_time,omitempty"`
Autoscale *DataSourceClusterClusterInfoAutoscale `json:"autoscale,omitempty"`
AwsAttributes *DataSourceClusterClusterInfoAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *DataSourceClusterClusterInfoAzureAttributes `json:"azure_attributes,omitempty"`
ClusterLogConf *DataSourceClusterClusterInfoClusterLogConf `json:"cluster_log_conf,omitempty"`
ClusterLogStatus *DataSourceClusterClusterInfoClusterLogStatus `json:"cluster_log_status,omitempty"`
DockerImage *DataSourceClusterClusterInfoDockerImage `json:"docker_image,omitempty"`
Driver *DataSourceClusterClusterInfoDriver `json:"driver,omitempty"`
Executors []DataSourceClusterClusterInfoExecutors `json:"executors,omitempty"`
GcpAttributes *DataSourceClusterClusterInfoGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []DataSourceClusterClusterInfoInitScripts `json:"init_scripts,omitempty"`
TerminationReason *DataSourceClusterClusterInfoTerminationReason `json:"termination_reason,omitempty"`
}
// DataSourceCluster is the root type of the databricks_cluster data source:
// cluster_id is the required input, cluster_info holds the computed result.
type DataSourceCluster struct {
ClusterId string `json:"cluster_id"`
Id string `json:"id,omitempty"`
ClusterInfo *DataSourceClusterClusterInfo `json:"cluster_info,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceClusters is the root type of the databricks_clusters data source;
// ids is the computed list of matching cluster IDs.
type DataSourceClusters struct {
ClusterNameContains string `json:"cluster_name_contains,omitempty"`
Id string `json:"id,omitempty"`
Ids []string `json:"ids,omitempty"`
}

View File

@ -0,0 +1,13 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceCurrentUser is the root type of the databricks_current_user data
// source; all fields are computed outputs.
type DataSourceCurrentUser struct {
Alphanumeric string `json:"alphanumeric,omitempty"`
ExternalId string `json:"external_id,omitempty"`
Home string `json:"home,omitempty"`
Id string `json:"id,omitempty"`
Repos string `json:"repos,omitempty"`
UserName string `json:"user_name,omitempty"`
WorkspaceUrl string `json:"workspace_url,omitempty"`
}

View File

@ -0,0 +1,11 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceDbfsFile is the root type of the databricks_dbfs_file data
// source; path and limit_file_size carry no omitempty and so always serialize.
type DataSourceDbfsFile struct {
Content string `json:"content,omitempty"`
FileSize int `json:"file_size,omitempty"`
Id string `json:"id,omitempty"`
LimitFileSize bool `json:"limit_file_size"`
Path string `json:"path"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceDbfsFilePaths is the root type of the databricks_dbfs_file_paths
// data source. PathList is []any because the schema exposes it as a set of
// untyped objects.
type DataSourceDbfsFilePaths struct {
Id string `json:"id,omitempty"`
Path string `json:"path"`
PathList []any `json:"path_list,omitempty"`
Recursive bool `json:"recursive"`
}

View File

@ -0,0 +1,20 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceGroup is the root type of the databricks_group data source;
// display_name is the required lookup key, the rest are computed membership
// and entitlement outputs.
type DataSourceGroup struct {
AllowClusterCreate bool `json:"allow_cluster_create,omitempty"`
AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
ChildGroups []string `json:"child_groups,omitempty"`
DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"`
DisplayName string `json:"display_name"`
ExternalId string `json:"external_id,omitempty"`
Groups []string `json:"groups,omitempty"`
Id string `json:"id,omitempty"`
InstanceProfiles []string `json:"instance_profiles,omitempty"`
Members []string `json:"members,omitempty"`
Recursive bool `json:"recursive,omitempty"`
ServicePrincipals []string `json:"service_principals,omitempty"`
Users []string `json:"users,omitempty"`
WorkspaceAccess bool `json:"workspace_access,omitempty"`
}

View File

@ -0,0 +1,659 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceJobJobSettingsSettingsDbtTask corresponds to the dbt_task block
// of job settings in the databricks_job data source.
type DataSourceJobJobSettingsSettingsDbtTask struct {
Commands []string `json:"commands"`
ProfilesDirectory string `json:"profiles_directory,omitempty"`
ProjectDirectory string `json:"project_directory,omitempty"`
Schema string `json:"schema,omitempty"`
WarehouseId string `json:"warehouse_id,omitempty"`
}

// DataSourceJobJobSettingsSettingsEmailNotifications corresponds to the
// email_notifications block of job settings.
type DataSourceJobJobSettingsSettingsEmailNotifications struct {
AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"`
OnSuccess []string `json:"on_success,omitempty"`
}

// DataSourceJobJobSettingsSettingsGitSource corresponds to the git_source
// block of job settings.
type DataSourceJobJobSettingsSettingsGitSource struct {
Branch string `json:"branch,omitempty"`
Commit string `json:"commit,omitempty"`
Provider string `json:"provider,omitempty"`
Tag string `json:"tag,omitempty"`
Url string `json:"url"`
}
// The following types model the new_cluster block nested under job_cluster in
// the databricks_job data source; each struct maps one nested schema block.

// DataSourceJobJobSettingsSettingsJobClusterNewClusterAutoscale corresponds
// to the autoscale block.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterAwsAttributes
// corresponds to the aws_attributes block.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterAwsAttributes struct {
Availability string `json:"availability,omitempty"`
EbsVolumeCount int `json:"ebs_volume_count,omitempty"`
EbsVolumeSize int `json:"ebs_volume_size,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterAzureAttributes
// corresponds to the azure_attributes block.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterAzureAttributes struct {
Availability string `json:"availability,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfs
// corresponds to the dbfs block of cluster_log_conf.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfs struct {
Destination string `json:"destination"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3
// corresponds to the s3 block of cluster_log_conf.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConf
// corresponds to the cluster_log_conf block (one of dbfs or s3).
type DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConf struct {
Dbfs *DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConfS3 `json:"s3,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuth
// corresponds to the basic_auth block of docker_image.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterDockerImage corresponds
// to the docker_image block.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterDockerImage struct {
Url string `json:"url"`
BasicAuth *DataSourceJobJobSettingsSettingsJobClusterNewClusterDockerImageBasicAuth `json:"basic_auth,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterGcpAttributes
// corresponds to the gcp_attributes block.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs
// corresponds to the dbfs block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs struct {
Destination string `json:"destination"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile
// corresponds to the file block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile struct {
Destination string `json:"destination,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs
// corresponds to the gcs block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs struct {
Destination string `json:"destination,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3
// corresponds to the s3 block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScripts corresponds
// to one element of the init_scripts list (one of dbfs, file, gcs, or s3).
type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScripts struct {
Dbfs *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3 `json:"s3,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClients
// corresponds to the clients block of workload_type.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClients struct {
Jobs bool `json:"jobs,omitempty"`
Notebooks bool `json:"notebooks,omitempty"`
}

// DataSourceJobJobSettingsSettingsJobClusterNewClusterWorkloadType
// corresponds to the workload_type block.
type DataSourceJobJobSettingsSettingsJobClusterNewClusterWorkloadType struct {
Clients *DataSourceJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClients `json:"clients,omitempty"`
}
// DataSourceJobJobSettingsSettingsJobClusterNewCluster corresponds to the
// new_cluster block under job_cluster; scalar attributes first, then nested
// blocks, mirroring the generator's output order.
type DataSourceJobJobSettingsSettingsJobClusterNewCluster struct {
ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"`
AutoterminationMinutes int `json:"autotermination_minutes,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DataSecurityMode string `json:"data_security_mode,omitempty"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
IdempotencyToken string `json:"idempotency_token,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers"`
PolicyId string `json:"policy_id,omitempty"`
RuntimeEngine string `json:"runtime_engine,omitempty"`
SingleUserName string `json:"single_user_name,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SparkVersion string `json:"spark_version"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
Autoscale *DataSourceJobJobSettingsSettingsJobClusterNewClusterAutoscale `json:"autoscale,omitempty"`
AwsAttributes *DataSourceJobJobSettingsSettingsJobClusterNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *DataSourceJobJobSettingsSettingsJobClusterNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
ClusterLogConf *DataSourceJobJobSettingsSettingsJobClusterNewClusterClusterLogConf `json:"cluster_log_conf,omitempty"`
DockerImage *DataSourceJobJobSettingsSettingsJobClusterNewClusterDockerImage `json:"docker_image,omitempty"`
GcpAttributes *DataSourceJobJobSettingsSettingsJobClusterNewClusterGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScripts `json:"init_scripts,omitempty"`
WorkloadType *DataSourceJobJobSettingsSettingsJobClusterNewClusterWorkloadType `json:"workload_type,omitempty"`
}
// DataSourceJobJobSettingsSettingsJobCluster corresponds to one element of
// the job_cluster list in job settings.
type DataSourceJobJobSettingsSettingsJobCluster struct {
JobClusterKey string `json:"job_cluster_key,omitempty"`
NewCluster *DataSourceJobJobSettingsSettingsJobClusterNewCluster `json:"new_cluster,omitempty"`
}
// DataSourceJobJobSettingsSettingsLibraryCran corresponds to the cran block
// of a library entry.
type DataSourceJobJobSettingsSettingsLibraryCran struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}

// DataSourceJobJobSettingsSettingsLibraryMaven corresponds to the maven block
// of a library entry.
type DataSourceJobJobSettingsSettingsLibraryMaven struct {
Coordinates string `json:"coordinates"`
Exclusions []string `json:"exclusions,omitempty"`
Repo string `json:"repo,omitempty"`
}

// DataSourceJobJobSettingsSettingsLibraryPypi corresponds to the pypi block
// of a library entry.
type DataSourceJobJobSettingsSettingsLibraryPypi struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}

// DataSourceJobJobSettingsSettingsLibrary corresponds to one element of the
// library list in job settings (one of egg, jar, whl, cran, maven, or pypi).
type DataSourceJobJobSettingsSettingsLibrary struct {
Egg string `json:"egg,omitempty"`
Jar string `json:"jar,omitempty"`
Whl string `json:"whl,omitempty"`
Cran *DataSourceJobJobSettingsSettingsLibraryCran `json:"cran,omitempty"`
Maven *DataSourceJobJobSettingsSettingsLibraryMaven `json:"maven,omitempty"`
Pypi *DataSourceJobJobSettingsSettingsLibraryPypi `json:"pypi,omitempty"`
}
// The following types model the top-level new_cluster block of job settings;
// they parallel the JobClusterNewCluster* types above.

// DataSourceJobJobSettingsSettingsNewClusterAutoscale corresponds to the
// autoscale block.
type DataSourceJobJobSettingsSettingsNewClusterAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterAwsAttributes corresponds to the
// aws_attributes block.
type DataSourceJobJobSettingsSettingsNewClusterAwsAttributes struct {
Availability string `json:"availability,omitempty"`
EbsVolumeCount int `json:"ebs_volume_count,omitempty"`
EbsVolumeSize int `json:"ebs_volume_size,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterAzureAttributes corresponds to
// the azure_attributes block.
type DataSourceJobJobSettingsSettingsNewClusterAzureAttributes struct {
Availability string `json:"availability,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterClusterLogConfDbfs corresponds to
// the dbfs block of cluster_log_conf.
type DataSourceJobJobSettingsSettingsNewClusterClusterLogConfDbfs struct {
Destination string `json:"destination"`
}

// DataSourceJobJobSettingsSettingsNewClusterClusterLogConfS3 corresponds to
// the s3 block of cluster_log_conf.
type DataSourceJobJobSettingsSettingsNewClusterClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterClusterLogConf corresponds to the
// cluster_log_conf block (one of dbfs or s3).
type DataSourceJobJobSettingsSettingsNewClusterClusterLogConf struct {
Dbfs *DataSourceJobJobSettingsSettingsNewClusterClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsNewClusterClusterLogConfS3 `json:"s3,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterDockerImageBasicAuth corresponds
// to the basic_auth block of docker_image.
type DataSourceJobJobSettingsSettingsNewClusterDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}

// DataSourceJobJobSettingsSettingsNewClusterDockerImage corresponds to the
// docker_image block.
type DataSourceJobJobSettingsSettingsNewClusterDockerImage struct {
Url string `json:"url"`
BasicAuth *DataSourceJobJobSettingsSettingsNewClusterDockerImageBasicAuth `json:"basic_auth,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterGcpAttributes corresponds to the
// gcp_attributes block.
type DataSourceJobJobSettingsSettingsNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterInitScriptsDbfs corresponds to
// the dbfs block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsNewClusterInitScriptsDbfs struct {
Destination string `json:"destination"`
}

// DataSourceJobJobSettingsSettingsNewClusterInitScriptsFile corresponds to
// the file block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsNewClusterInitScriptsFile struct {
Destination string `json:"destination,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterInitScriptsGcs corresponds to the
// gcs block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsNewClusterInitScriptsGcs struct {
Destination string `json:"destination,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterInitScriptsS3 corresponds to the
// s3 block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsNewClusterInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterInitScripts corresponds to one
// element of the init_scripts list (one of dbfs, file, gcs, or s3).
type DataSourceJobJobSettingsSettingsNewClusterInitScripts struct {
Dbfs *DataSourceJobJobSettingsSettingsNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsNewClusterInitScriptsS3 `json:"s3,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterWorkloadTypeClients corresponds
// to the clients block of workload_type.
type DataSourceJobJobSettingsSettingsNewClusterWorkloadTypeClients struct {
Jobs bool `json:"jobs,omitempty"`
Notebooks bool `json:"notebooks,omitempty"`
}

// DataSourceJobJobSettingsSettingsNewClusterWorkloadType corresponds to the
// workload_type block.
type DataSourceJobJobSettingsSettingsNewClusterWorkloadType struct {
Clients *DataSourceJobJobSettingsSettingsNewClusterWorkloadTypeClients `json:"clients,omitempty"`
}
// DataSourceJobJobSettingsSettingsNewCluster corresponds to the top-level
// new_cluster block of job settings; scalar attributes first, then nested
// blocks, mirroring the generator's output order.
type DataSourceJobJobSettingsSettingsNewCluster struct {
ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"`
AutoterminationMinutes int `json:"autotermination_minutes,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DataSecurityMode string `json:"data_security_mode,omitempty"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
IdempotencyToken string `json:"idempotency_token,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers"`
PolicyId string `json:"policy_id,omitempty"`
RuntimeEngine string `json:"runtime_engine,omitempty"`
SingleUserName string `json:"single_user_name,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SparkVersion string `json:"spark_version"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
Autoscale *DataSourceJobJobSettingsSettingsNewClusterAutoscale `json:"autoscale,omitempty"`
AwsAttributes *DataSourceJobJobSettingsSettingsNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *DataSourceJobJobSettingsSettingsNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
ClusterLogConf *DataSourceJobJobSettingsSettingsNewClusterClusterLogConf `json:"cluster_log_conf,omitempty"`
DockerImage *DataSourceJobJobSettingsSettingsNewClusterDockerImage `json:"docker_image,omitempty"`
GcpAttributes *DataSourceJobJobSettingsSettingsNewClusterGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []DataSourceJobJobSettingsSettingsNewClusterInitScripts `json:"init_scripts,omitempty"`
WorkloadType *DataSourceJobJobSettingsSettingsNewClusterWorkloadType `json:"workload_type,omitempty"`
}
// DataSourceJobJobSettingsSettingsNotebookTask corresponds to the
// notebook_task block of job settings.
type DataSourceJobJobSettingsSettingsNotebookTask struct {
BaseParameters map[string]string `json:"base_parameters,omitempty"`
NotebookPath string `json:"notebook_path"`
}

// DataSourceJobJobSettingsSettingsPipelineTask corresponds to the
// pipeline_task block of job settings.
type DataSourceJobJobSettingsSettingsPipelineTask struct {
PipelineId string `json:"pipeline_id"`
}

// DataSourceJobJobSettingsSettingsPythonWheelTask corresponds to the
// python_wheel_task block of job settings.
type DataSourceJobJobSettingsSettingsPythonWheelTask struct {
EntryPoint string `json:"entry_point,omitempty"`
NamedParameters map[string]string `json:"named_parameters,omitempty"`
PackageName string `json:"package_name,omitempty"`
Parameters []string `json:"parameters,omitempty"`
}

// DataSourceJobJobSettingsSettingsSchedule corresponds to the schedule block
// of job settings.
type DataSourceJobJobSettingsSettingsSchedule struct {
PauseStatus string `json:"pause_status,omitempty"`
QuartzCronExpression string `json:"quartz_cron_expression"`
TimezoneId string `json:"timezone_id"`
}

// DataSourceJobJobSettingsSettingsSparkJarTask corresponds to the
// spark_jar_task block of job settings.
type DataSourceJobJobSettingsSettingsSparkJarTask struct {
JarUri string `json:"jar_uri,omitempty"`
MainClassName string `json:"main_class_name,omitempty"`
Parameters []string `json:"parameters,omitempty"`
}

// DataSourceJobJobSettingsSettingsSparkPythonTask corresponds to the
// spark_python_task block of job settings.
type DataSourceJobJobSettingsSettingsSparkPythonTask struct {
Parameters []string `json:"parameters,omitempty"`
PythonFile string `json:"python_file"`
}

// DataSourceJobJobSettingsSettingsSparkSubmitTask corresponds to the
// spark_submit_task block of job settings.
type DataSourceJobJobSettingsSettingsSparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}
// The following types model blocks nested under a task entry of job settings;
// they parallel the top-level settings blocks of the same name.

// DataSourceJobJobSettingsSettingsTaskDbtTask corresponds to the dbt_task
// block of a task.
type DataSourceJobJobSettingsSettingsTaskDbtTask struct {
Commands []string `json:"commands"`
ProfilesDirectory string `json:"profiles_directory,omitempty"`
ProjectDirectory string `json:"project_directory,omitempty"`
Schema string `json:"schema,omitempty"`
WarehouseId string `json:"warehouse_id,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskDependsOn corresponds to one element of
// the depends_on list of a task.
type DataSourceJobJobSettingsSettingsTaskDependsOn struct {
TaskKey string `json:"task_key,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskEmailNotifications corresponds to the
// email_notifications block of a task.
type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct {
AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"`
OnSuccess []string `json:"on_success,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskLibraryCran corresponds to the cran
// block of a task library entry.
type DataSourceJobJobSettingsSettingsTaskLibraryCran struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskLibraryMaven corresponds to the maven
// block of a task library entry.
type DataSourceJobJobSettingsSettingsTaskLibraryMaven struct {
Coordinates string `json:"coordinates"`
Exclusions []string `json:"exclusions,omitempty"`
Repo string `json:"repo,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskLibraryPypi corresponds to the pypi
// block of a task library entry.
type DataSourceJobJobSettingsSettingsTaskLibraryPypi struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskLibrary corresponds to one element of
// the library list of a task (one of egg, jar, whl, cran, maven, or pypi).
type DataSourceJobJobSettingsSettingsTaskLibrary struct {
Egg string `json:"egg,omitempty"`
Jar string `json:"jar,omitempty"`
Whl string `json:"whl,omitempty"`
Cran *DataSourceJobJobSettingsSettingsTaskLibraryCran `json:"cran,omitempty"`
Maven *DataSourceJobJobSettingsSettingsTaskLibraryMaven `json:"maven,omitempty"`
Pypi *DataSourceJobJobSettingsSettingsTaskLibraryPypi `json:"pypi,omitempty"`
}
// The following types model the new_cluster block nested under a task; they
// parallel the NewCluster* types above.

// DataSourceJobJobSettingsSettingsTaskNewClusterAutoscale corresponds to the
// autoscale block.
type DataSourceJobJobSettingsSettingsTaskNewClusterAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterAwsAttributes corresponds to
// the aws_attributes block.
type DataSourceJobJobSettingsSettingsTaskNewClusterAwsAttributes struct {
Availability string `json:"availability,omitempty"`
EbsVolumeCount int `json:"ebs_volume_count,omitempty"`
EbsVolumeSize int `json:"ebs_volume_size,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterAzureAttributes corresponds
// to the azure_attributes block.
type DataSourceJobJobSettingsSettingsTaskNewClusterAzureAttributes struct {
Availability string `json:"availability,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfs
// corresponds to the dbfs block of cluster_log_conf.
type DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfs struct {
Destination string `json:"destination"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConfS3 corresponds
// to the s3 block of cluster_log_conf.
type DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConf corresponds to
// the cluster_log_conf block (one of dbfs or s3).
type DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConf struct {
Dbfs *DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConfS3 `json:"s3,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuth
// corresponds to the basic_auth block of docker_image.
type DataSourceJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterDockerImage corresponds to
// the docker_image block.
type DataSourceJobJobSettingsSettingsTaskNewClusterDockerImage struct {
Url string `json:"url"`
BasicAuth *DataSourceJobJobSettingsSettingsTaskNewClusterDockerImageBasicAuth `json:"basic_auth,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterGcpAttributes corresponds to
// the gcp_attributes block.
type DataSourceJobJobSettingsSettingsTaskNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs corresponds
// to the dbfs block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs struct {
Destination string `json:"destination"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsFile corresponds
// to the file block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsFile struct {
Destination string `json:"destination,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsGcs corresponds to
// the gcs block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsGcs struct {
Destination string `json:"destination,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsS3 corresponds to
// the s3 block of an init_scripts entry.
type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterInitScripts corresponds to
// one element of the init_scripts list (one of dbfs, file, gcs, or s3).
type DataSourceJobJobSettingsSettingsTaskNewClusterInitScripts struct {
Dbfs *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsS3 `json:"s3,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients
// corresponds to the clients block of workload_type.
type DataSourceJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients struct {
Jobs bool `json:"jobs,omitempty"`
Notebooks bool `json:"notebooks,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskNewClusterWorkloadType corresponds to
// the workload_type block.
type DataSourceJobJobSettingsSettingsTaskNewClusterWorkloadType struct {
Clients *DataSourceJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients `json:"clients,omitempty"`
}
// DataSourceJobJobSettingsSettingsTaskNewCluster corresponds to the
// new_cluster block of a task; scalar attributes first, then nested blocks,
// mirroring the generator's output order.
type DataSourceJobJobSettingsSettingsTaskNewCluster struct {
ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"`
AutoterminationMinutes int `json:"autotermination_minutes,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DataSecurityMode string `json:"data_security_mode,omitempty"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
IdempotencyToken string `json:"idempotency_token,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers"`
PolicyId string `json:"policy_id,omitempty"`
RuntimeEngine string `json:"runtime_engine,omitempty"`
SingleUserName string `json:"single_user_name,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SparkVersion string `json:"spark_version"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
Autoscale *DataSourceJobJobSettingsSettingsTaskNewClusterAutoscale `json:"autoscale,omitempty"`
AwsAttributes *DataSourceJobJobSettingsSettingsTaskNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *DataSourceJobJobSettingsSettingsTaskNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
ClusterLogConf *DataSourceJobJobSettingsSettingsTaskNewClusterClusterLogConf `json:"cluster_log_conf,omitempty"`
DockerImage *DataSourceJobJobSettingsSettingsTaskNewClusterDockerImage `json:"docker_image,omitempty"`
GcpAttributes *DataSourceJobJobSettingsSettingsTaskNewClusterGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []DataSourceJobJobSettingsSettingsTaskNewClusterInitScripts `json:"init_scripts,omitempty"`
WorkloadType *DataSourceJobJobSettingsSettingsTaskNewClusterWorkloadType `json:"workload_type,omitempty"`
}
// DataSourceJobJobSettingsSettingsTaskNotebookTask corresponds to the
// notebook_task block of a task.
type DataSourceJobJobSettingsSettingsTaskNotebookTask struct {
BaseParameters map[string]string `json:"base_parameters,omitempty"`
NotebookPath string `json:"notebook_path"`
}

// DataSourceJobJobSettingsSettingsTaskPipelineTask corresponds to the
// pipeline_task block of a task.
type DataSourceJobJobSettingsSettingsTaskPipelineTask struct {
PipelineId string `json:"pipeline_id"`
}

// DataSourceJobJobSettingsSettingsTaskPythonWheelTask corresponds to the
// python_wheel_task block of a task.
type DataSourceJobJobSettingsSettingsTaskPythonWheelTask struct {
EntryPoint string `json:"entry_point,omitempty"`
NamedParameters map[string]string `json:"named_parameters,omitempty"`
PackageName string `json:"package_name,omitempty"`
Parameters []string `json:"parameters,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskSparkJarTask corresponds to the
// spark_jar_task block of a task.
type DataSourceJobJobSettingsSettingsTaskSparkJarTask struct {
JarUri string `json:"jar_uri,omitempty"`
MainClassName string `json:"main_class_name,omitempty"`
Parameters []string `json:"parameters,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskSparkPythonTask corresponds to the
// spark_python_task block of a task.
type DataSourceJobJobSettingsSettingsTaskSparkPythonTask struct {
Parameters []string `json:"parameters,omitempty"`
PythonFile string `json:"python_file"`
}

// DataSourceJobJobSettingsSettingsTaskSparkSubmitTask corresponds to the
// spark_submit_task block of a task.
type DataSourceJobJobSettingsSettingsTaskSparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}

// DataSourceJobJobSettingsSettingsTaskSqlTaskAlert corresponds to the alert
// block of sql_task.
type DataSourceJobJobSettingsSettingsTaskSqlTaskAlert struct {
AlertId string `json:"alert_id"`
}

// DataSourceJobJobSettingsSettingsTaskSqlTaskDashboard corresponds to the
// dashboard block of sql_task.
type DataSourceJobJobSettingsSettingsTaskSqlTaskDashboard struct {
DashboardId string `json:"dashboard_id"`
}

// DataSourceJobJobSettingsSettingsTaskSqlTaskQuery corresponds to the query
// block of sql_task.
type DataSourceJobJobSettingsSettingsTaskSqlTaskQuery struct {
QueryId string `json:"query_id"`
}

// DataSourceJobJobSettingsSettingsTaskSqlTask corresponds to the sql_task
// block of a task (one of alert, dashboard, or query).
type DataSourceJobJobSettingsSettingsTaskSqlTask struct {
Parameters map[string]string `json:"parameters,omitempty"`
WarehouseId string `json:"warehouse_id,omitempty"`
Alert *DataSourceJobJobSettingsSettingsTaskSqlTaskAlert `json:"alert,omitempty"`
Dashboard *DataSourceJobJobSettingsSettingsTaskSqlTaskDashboard `json:"dashboard,omitempty"`
Query *DataSourceJobJobSettingsSettingsTaskSqlTaskQuery `json:"query,omitempty"`
}
type DataSourceJobJobSettingsSettingsTask struct {
Description string `json:"description,omitempty"`
ExistingClusterId string `json:"existing_cluster_id,omitempty"`
JobClusterKey string `json:"job_cluster_key,omitempty"`
MaxRetries int `json:"max_retries,omitempty"`
MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
TaskKey string `json:"task_key,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"`
DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"`
EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"`
Library []DataSourceJobJobSettingsSettingsTaskLibrary `json:"library,omitempty"`
NewCluster *DataSourceJobJobSettingsSettingsTaskNewCluster `json:"new_cluster,omitempty"`
NotebookTask *DataSourceJobJobSettingsSettingsTaskNotebookTask `json:"notebook_task,omitempty"`
PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"`
SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
SqlTask *DataSourceJobJobSettingsSettingsTaskSqlTask `json:"sql_task,omitempty"`
}
type DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure struct {
Id string `json:"id"`
}
type DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart struct {
Id string `json:"id"`
}
type DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess struct {
Id string `json:"id"`
}
type DataSourceJobJobSettingsSettingsWebhookNotifications struct {
OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"`
OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
}
type DataSourceJobJobSettingsSettings struct {
ExistingClusterId string `json:"existing_cluster_id,omitempty"`
Format string `json:"format,omitempty"`
MaxConcurrentRuns int `json:"max_concurrent_runs,omitempty"`
MaxRetries int `json:"max_retries,omitempty"`
MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
Name string `json:"name,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
Tags map[string]string `json:"tags,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
DbtTask *DataSourceJobJobSettingsSettingsDbtTask `json:"dbt_task,omitempty"`
EmailNotifications *DataSourceJobJobSettingsSettingsEmailNotifications `json:"email_notifications,omitempty"`
GitSource *DataSourceJobJobSettingsSettingsGitSource `json:"git_source,omitempty"`
JobCluster []DataSourceJobJobSettingsSettingsJobCluster `json:"job_cluster,omitempty"`
Library []DataSourceJobJobSettingsSettingsLibrary `json:"library,omitempty"`
NewCluster *DataSourceJobJobSettingsSettingsNewCluster `json:"new_cluster,omitempty"`
NotebookTask *DataSourceJobJobSettingsSettingsNotebookTask `json:"notebook_task,omitempty"`
PipelineTask *DataSourceJobJobSettingsSettingsPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *DataSourceJobJobSettingsSettingsPythonWheelTask `json:"python_wheel_task,omitempty"`
Schedule *DataSourceJobJobSettingsSettingsSchedule `json:"schedule,omitempty"`
SparkJarTask *DataSourceJobJobSettingsSettingsSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *DataSourceJobJobSettingsSettingsSparkPythonTask `json:"spark_python_task,omitempty"`
SparkSubmitTask *DataSourceJobJobSettingsSettingsSparkSubmitTask `json:"spark_submit_task,omitempty"`
Task []DataSourceJobJobSettingsSettingsTask `json:"task,omitempty"`
WebhookNotifications *DataSourceJobJobSettingsSettingsWebhookNotifications `json:"webhook_notifications,omitempty"`
}
type DataSourceJobJobSettings struct {
CreatedTime int `json:"created_time,omitempty"`
CreatorUserName string `json:"creator_user_name,omitempty"`
JobId int `json:"job_id,omitempty"`
Settings *DataSourceJobJobSettingsSettings `json:"settings,omitempty"`
}
type DataSourceJob struct {
Id string `json:"id,omitempty"`
JobId string `json:"job_id,omitempty"`
JobName string `json:"job_name,omitempty"`
JobSettings *DataSourceJobJobSettings `json:"job_settings,omitempty"`
}

View File

@ -0,0 +1,8 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceJobs is the generated schema of the databricks_jobs data source.
type DataSourceJobs struct {
Id string `json:"id,omitempty"`
Ids map[string]string `json:"ids,omitempty"`
}

View File

@ -0,0 +1,8 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceMwsWorkspaces is the generated schema of the databricks_mws_workspaces data source.
type DataSourceMwsWorkspaces struct {
Id string `json:"id,omitempty"`
Ids map[string]int `json:"ids,omitempty"`
}

View File

@ -0,0 +1,19 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceNodeType is the generated schema of the databricks_node_type data source.
type DataSourceNodeType struct {
Category string `json:"category,omitempty"`
GbPerCore int `json:"gb_per_core,omitempty"`
Graviton bool `json:"graviton,omitempty"`
Id string `json:"id,omitempty"`
IsIoCacheEnabled bool `json:"is_io_cache_enabled,omitempty"`
LocalDisk bool `json:"local_disk,omitempty"`
MinCores int `json:"min_cores,omitempty"`
MinGpus int `json:"min_gpus,omitempty"`
MinMemoryGb int `json:"min_memory_gb,omitempty"`
PhotonDriverCapable bool `json:"photon_driver_capable,omitempty"`
PhotonWorkerCapable bool `json:"photon_worker_capable,omitempty"`
SupportPortForwarding bool `json:"support_port_forwarding,omitempty"`
Vcpu bool `json:"vcpu,omitempty"`
}

View File

@ -0,0 +1,13 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceNotebook is the generated schema of the databricks_notebook data source.
// Format and Path carry no omitempty and are therefore required attributes.
type DataSourceNotebook struct {
Content string `json:"content,omitempty"`
Format string `json:"format"`
Id string `json:"id,omitempty"`
Language string `json:"language,omitempty"`
ObjectId int `json:"object_id,omitempty"`
ObjectType string `json:"object_type,omitempty"`
Path string `json:"path"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceNotebookPaths is the generated schema of the databricks_notebook_paths data source.
type DataSourceNotebookPaths struct {
Id string `json:"id,omitempty"`
NotebookPathList []any `json:"notebook_path_list,omitempty"`
Path string `json:"path"`
Recursive bool `json:"recursive"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceSchemas is the generated schema of the databricks_schemas data source.
type DataSourceSchemas struct {
CatalogName string `json:"catalog_name"`
Id string `json:"id,omitempty"`
Ids []string `json:"ids,omitempty"`
}

View File

@ -0,0 +1,14 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceServicePrincipal is the generated schema of the databricks_service_principal data source.
type DataSourceServicePrincipal struct {
Active bool `json:"active,omitempty"`
ApplicationId string `json:"application_id,omitempty"`
DisplayName string `json:"display_name,omitempty"`
ExternalId string `json:"external_id,omitempty"`
Home string `json:"home,omitempty"`
Id string `json:"id,omitempty"`
Repos string `json:"repos,omitempty"`
SpId string `json:"sp_id,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceServicePrincipals is the generated schema of the databricks_service_principals data source.
type DataSourceServicePrincipals struct {
ApplicationIds []string `json:"application_ids,omitempty"`
DisplayNameContains string `json:"display_name_contains,omitempty"`
Id string `json:"id,omitempty"`
}

View File

@ -0,0 +1,20 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceShareObject models one object block within the databricks_share data source.
type DataSourceShareObject struct {
AddedAt int `json:"added_at,omitempty"`
AddedBy string `json:"added_by,omitempty"`
Comment string `json:"comment,omitempty"`
DataObjectType string `json:"data_object_type"`
Name string `json:"name"`
SharedAs string `json:"shared_as,omitempty"`
}
// DataSourceShare is the generated schema of the databricks_share data source.
type DataSourceShare struct {
CreatedAt int `json:"created_at,omitempty"`
CreatedBy string `json:"created_by,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name,omitempty"`
Object []DataSourceShareObject `json:"object,omitempty"`
}

View File

@ -0,0 +1,8 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceShares is the generated schema of the databricks_shares data source.
type DataSourceShares struct {
Id string `json:"id,omitempty"`
Shares []string `json:"shares,omitempty"`
}

View File

@ -0,0 +1,17 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceSparkVersion is the generated schema of the databricks_spark_version data source.
type DataSourceSparkVersion struct {
Beta bool `json:"beta,omitempty"`
Genomics bool `json:"genomics,omitempty"`
Gpu bool `json:"gpu,omitempty"`
Graviton bool `json:"graviton,omitempty"`
Id string `json:"id,omitempty"`
Latest bool `json:"latest,omitempty"`
LongTermSupport bool `json:"long_term_support,omitempty"`
Ml bool `json:"ml,omitempty"`
Photon bool `json:"photon,omitempty"`
Scala string `json:"scala,omitempty"`
SparkVersion string `json:"spark_version,omitempty"`
}

View File

@ -0,0 +1,44 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceSqlWarehouseChannel models the channel block of the databricks_sql_warehouse data source.
type DataSourceSqlWarehouseChannel struct {
Name string `json:"name,omitempty"`
}
// DataSourceSqlWarehouseOdbcParams models the odbc_params block of the databricks_sql_warehouse data source.
type DataSourceSqlWarehouseOdbcParams struct {
Host string `json:"host,omitempty"`
Hostname string `json:"hostname,omitempty"`
Path string `json:"path"`
Port int `json:"port"`
Protocol string `json:"protocol"`
}
// DataSourceSqlWarehouseTagsCustomTags models one custom_tags entry of the warehouse tags block.
type DataSourceSqlWarehouseTagsCustomTags struct {
Key string `json:"key"`
Value string `json:"value"`
}
// DataSourceSqlWarehouseTags models the tags block of the databricks_sql_warehouse data source.
type DataSourceSqlWarehouseTags struct {
CustomTags []DataSourceSqlWarehouseTagsCustomTags `json:"custom_tags,omitempty"`
}
// DataSourceSqlWarehouse is the generated schema of the databricks_sql_warehouse data source.
// Id is required (no omitempty); all other scalar attributes are optional.
type DataSourceSqlWarehouse struct {
AutoStopMins int `json:"auto_stop_mins,omitempty"`
ClusterSize string `json:"cluster_size,omitempty"`
DataSourceId string `json:"data_source_id,omitempty"`
EnablePhoton bool `json:"enable_photon,omitempty"`
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
Id string `json:"id"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
JdbcUrl string `json:"jdbc_url,omitempty"`
MaxNumClusters int `json:"max_num_clusters,omitempty"`
MinNumClusters int `json:"min_num_clusters,omitempty"`
Name string `json:"name,omitempty"`
NumClusters int `json:"num_clusters,omitempty"`
SpotInstancePolicy string `json:"spot_instance_policy,omitempty"`
State string `json:"state,omitempty"`
Channel *DataSourceSqlWarehouseChannel `json:"channel,omitempty"`
OdbcParams *DataSourceSqlWarehouseOdbcParams `json:"odbc_params,omitempty"`
Tags *DataSourceSqlWarehouseTags `json:"tags,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceSqlWarehouses is the generated schema of the databricks_sql_warehouses data source.
type DataSourceSqlWarehouses struct {
Id string `json:"id,omitempty"`
Ids []string `json:"ids,omitempty"`
WarehouseNameContains string `json:"warehouse_name_contains,omitempty"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceTables is the generated schema of the databricks_tables data source.
type DataSourceTables struct {
CatalogName string `json:"catalog_name"`
Id string `json:"id,omitempty"`
Ids []string `json:"ids,omitempty"`
SchemaName string `json:"schema_name"`
}

View File

@ -0,0 +1,15 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceUser is the generated schema of the databricks_user data source.
type DataSourceUser struct {
Alphanumeric string `json:"alphanumeric,omitempty"`
ApplicationId string `json:"application_id,omitempty"`
DisplayName string `json:"display_name,omitempty"`
ExternalId string `json:"external_id,omitempty"`
Home string `json:"home,omitempty"`
Id string `json:"id,omitempty"`
Repos string `json:"repos,omitempty"`
UserId string `json:"user_id,omitempty"`
UserName string `json:"user_name,omitempty"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceViews is the generated schema of the databricks_views data source.
type DataSourceViews struct {
CatalogName string `json:"catalog_name"`
Id string `json:"id,omitempty"`
Ids []string `json:"ids,omitempty"`
SchemaName string `json:"schema_name"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSourceZones is the generated schema of the databricks_zones data source.
type DataSourceZones struct {
DefaultZone string `json:"default_zone,omitempty"`
Id string `json:"id,omitempty"`
Zones []string `json:"zones,omitempty"`
}

View File

@ -0,0 +1,67 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// DataSources aggregates every supported Databricks data source, keyed by the
// data source's Terraform type name in the json tag. Each field is a map so
// multiple named instances of a data source can coexist; NOTE(review): the map
// keys are presumably the Terraform resource labels — confirm against callers.
type DataSources struct {
AwsAssumeRolePolicy map[string]*DataSourceAwsAssumeRolePolicy `json:"databricks_aws_assume_role_policy,omitempty"`
AwsBucketPolicy map[string]*DataSourceAwsBucketPolicy `json:"databricks_aws_bucket_policy,omitempty"`
AwsCrossaccountPolicy map[string]*DataSourceAwsCrossaccountPolicy `json:"databricks_aws_crossaccount_policy,omitempty"`
Catalogs map[string]*DataSourceCatalogs `json:"databricks_catalogs,omitempty"`
Cluster map[string]*DataSourceCluster `json:"databricks_cluster,omitempty"`
Clusters map[string]*DataSourceClusters `json:"databricks_clusters,omitempty"`
CurrentUser map[string]*DataSourceCurrentUser `json:"databricks_current_user,omitempty"`
DbfsFile map[string]*DataSourceDbfsFile `json:"databricks_dbfs_file,omitempty"`
DbfsFilePaths map[string]*DataSourceDbfsFilePaths `json:"databricks_dbfs_file_paths,omitempty"`
Group map[string]*DataSourceGroup `json:"databricks_group,omitempty"`
Job map[string]*DataSourceJob `json:"databricks_job,omitempty"`
Jobs map[string]*DataSourceJobs `json:"databricks_jobs,omitempty"`
MwsWorkspaces map[string]*DataSourceMwsWorkspaces `json:"databricks_mws_workspaces,omitempty"`
NodeType map[string]*DataSourceNodeType `json:"databricks_node_type,omitempty"`
Notebook map[string]*DataSourceNotebook `json:"databricks_notebook,omitempty"`
NotebookPaths map[string]*DataSourceNotebookPaths `json:"databricks_notebook_paths,omitempty"`
Schemas map[string]*DataSourceSchemas `json:"databricks_schemas,omitempty"`
ServicePrincipal map[string]*DataSourceServicePrincipal `json:"databricks_service_principal,omitempty"`
ServicePrincipals map[string]*DataSourceServicePrincipals `json:"databricks_service_principals,omitempty"`
Share map[string]*DataSourceShare `json:"databricks_share,omitempty"`
Shares map[string]*DataSourceShares `json:"databricks_shares,omitempty"`
SparkVersion map[string]*DataSourceSparkVersion `json:"databricks_spark_version,omitempty"`
SqlWarehouse map[string]*DataSourceSqlWarehouse `json:"databricks_sql_warehouse,omitempty"`
SqlWarehouses map[string]*DataSourceSqlWarehouses `json:"databricks_sql_warehouses,omitempty"`
Tables map[string]*DataSourceTables `json:"databricks_tables,omitempty"`
User map[string]*DataSourceUser `json:"databricks_user,omitempty"`
Views map[string]*DataSourceViews `json:"databricks_views,omitempty"`
Zones map[string]*DataSourceZones `json:"databricks_zones,omitempty"`
}
// NewDataSources returns a DataSources value in which every data source map is
// pre-allocated, so callers can insert entries without nil-map checks.
func NewDataSources() *DataSources {
	var ds DataSources
	ds.AwsAssumeRolePolicy = make(map[string]*DataSourceAwsAssumeRolePolicy)
	ds.AwsBucketPolicy = make(map[string]*DataSourceAwsBucketPolicy)
	ds.AwsCrossaccountPolicy = make(map[string]*DataSourceAwsCrossaccountPolicy)
	ds.Catalogs = make(map[string]*DataSourceCatalogs)
	ds.Cluster = make(map[string]*DataSourceCluster)
	ds.Clusters = make(map[string]*DataSourceClusters)
	ds.CurrentUser = make(map[string]*DataSourceCurrentUser)
	ds.DbfsFile = make(map[string]*DataSourceDbfsFile)
	ds.DbfsFilePaths = make(map[string]*DataSourceDbfsFilePaths)
	ds.Group = make(map[string]*DataSourceGroup)
	ds.Job = make(map[string]*DataSourceJob)
	ds.Jobs = make(map[string]*DataSourceJobs)
	ds.MwsWorkspaces = make(map[string]*DataSourceMwsWorkspaces)
	ds.NodeType = make(map[string]*DataSourceNodeType)
	ds.Notebook = make(map[string]*DataSourceNotebook)
	ds.NotebookPaths = make(map[string]*DataSourceNotebookPaths)
	ds.Schemas = make(map[string]*DataSourceSchemas)
	ds.ServicePrincipal = make(map[string]*DataSourceServicePrincipal)
	ds.ServicePrincipals = make(map[string]*DataSourceServicePrincipals)
	ds.Share = make(map[string]*DataSourceShare)
	ds.Shares = make(map[string]*DataSourceShares)
	ds.SparkVersion = make(map[string]*DataSourceSparkVersion)
	ds.SqlWarehouse = make(map[string]*DataSourceSqlWarehouse)
	ds.SqlWarehouses = make(map[string]*DataSourceSqlWarehouses)
	ds.Tables = make(map[string]*DataSourceTables)
	ds.User = make(map[string]*DataSourceUser)
	ds.Views = make(map[string]*DataSourceViews)
	ds.Zones = make(map[string]*DataSourceZones)
	return &ds
}

View File

@ -0,0 +1,12 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceAwsS3Mount is the generated schema of the databricks_aws_s3_mount resource.
type ResourceAwsS3Mount struct {
ClusterId string `json:"cluster_id,omitempty"`
Id string `json:"id,omitempty"`
InstanceProfile string `json:"instance_profile,omitempty"`
MountName string `json:"mount_name"`
S3BucketName string `json:"s3_bucket_name"`
Source string `json:"source,omitempty"`
}

View File

@ -0,0 +1,17 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceAzureAdlsGen1Mount is the generated schema of the databricks_azure_adls_gen1_mount resource.
type ResourceAzureAdlsGen1Mount struct {
ClientId string `json:"client_id"`
ClientSecretKey string `json:"client_secret_key"`
ClientSecretScope string `json:"client_secret_scope"`
ClusterId string `json:"cluster_id,omitempty"`
Directory string `json:"directory,omitempty"`
Id string `json:"id,omitempty"`
MountName string `json:"mount_name"`
Source string `json:"source,omitempty"`
SparkConfPrefix string `json:"spark_conf_prefix,omitempty"`
StorageResourceName string `json:"storage_resource_name"`
TenantId string `json:"tenant_id"`
}

View File

@ -0,0 +1,18 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceAzureAdlsGen2Mount is the generated schema of the databricks_azure_adls_gen2_mount resource.
type ResourceAzureAdlsGen2Mount struct {
ClientId string `json:"client_id"`
ClientSecretKey string `json:"client_secret_key"`
ClientSecretScope string `json:"client_secret_scope"`
ClusterId string `json:"cluster_id,omitempty"`
ContainerName string `json:"container_name"`
Directory string `json:"directory,omitempty"`
Id string `json:"id,omitempty"`
InitializeFileSystem bool `json:"initialize_file_system"`
MountName string `json:"mount_name"`
Source string `json:"source,omitempty"`
StorageAccountName string `json:"storage_account_name"`
TenantId string `json:"tenant_id"`
}

View File

@ -0,0 +1,16 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceAzureBlobMount is the generated schema of the databricks_azure_blob_mount resource.
type ResourceAzureBlobMount struct {
AuthType string `json:"auth_type"`
ClusterId string `json:"cluster_id,omitempty"`
ContainerName string `json:"container_name"`
Directory string `json:"directory,omitempty"`
Id string `json:"id,omitempty"`
MountName string `json:"mount_name"`
Source string `json:"source,omitempty"`
StorageAccountName string `json:"storage_account_name"`
TokenSecretKey string `json:"token_secret_key"`
TokenSecretScope string `json:"token_secret_scope"`
}

View File

@ -0,0 +1,13 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceCatalog is the generated schema of the databricks_catalog resource.
type ResourceCatalog struct {
Comment string `json:"comment,omitempty"`
ForceDestroy bool `json:"force_destroy,omitempty"`
Id string `json:"id,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
Name string `json:"name"`
Owner string `json:"owner,omitempty"`
Properties map[string]string `json:"properties,omitempty"`
}

View File

@ -0,0 +1,163 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceClusterAutoscale models the autoscale block of the databricks_cluster resource.
type ResourceClusterAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
}
// ResourceClusterAwsAttributes models the aws_attributes block of the databricks_cluster resource.
type ResourceClusterAwsAttributes struct {
Availability string `json:"availability,omitempty"`
EbsVolumeCount int `json:"ebs_volume_count,omitempty"`
EbsVolumeSize int `json:"ebs_volume_size,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceClusterAzureAttributes models the azure_attributes block of the databricks_cluster resource.
type ResourceClusterAzureAttributes struct {
Availability string `json:"availability,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}
// ResourceClusterClusterLogConfDbfs models the dbfs destination of cluster_log_conf.
type ResourceClusterClusterLogConfDbfs struct {
Destination string `json:"destination"`
}
// ResourceClusterClusterLogConfS3 models the s3 destination of cluster_log_conf.
type ResourceClusterClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourceClusterClusterLogConf models the cluster_log_conf block of the databricks_cluster resource.
type ResourceClusterClusterLogConf struct {
Dbfs *ResourceClusterClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *ResourceClusterClusterLogConfS3 `json:"s3,omitempty"`
}
// ResourceClusterDockerImageBasicAuth models the basic_auth block of docker_image.
type ResourceClusterDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}
// ResourceClusterDockerImage models the docker_image block of the databricks_cluster resource.
type ResourceClusterDockerImage struct {
Url string `json:"url"`
BasicAuth *ResourceClusterDockerImageBasicAuth `json:"basic_auth,omitempty"`
}
// ResourceClusterGcpAttributes models the gcp_attributes block of the databricks_cluster resource.
type ResourceClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceClusterInitScriptsDbfs models the dbfs location of an init_scripts entry.
type ResourceClusterInitScriptsDbfs struct {
Destination string `json:"destination"`
}
// ResourceClusterInitScriptsFile models the file location of an init_scripts entry.
type ResourceClusterInitScriptsFile struct {
Destination string `json:"destination,omitempty"`
}
// ResourceClusterInitScriptsGcs models the gcs location of an init_scripts entry.
type ResourceClusterInitScriptsGcs struct {
Destination string `json:"destination,omitempty"`
}
// ResourceClusterInitScriptsS3 models the s3 location of an init_scripts entry.
type ResourceClusterInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourceClusterInitScripts models one init_scripts block of the databricks_cluster resource.
type ResourceClusterInitScripts struct {
Dbfs *ResourceClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceClusterInitScriptsS3 `json:"s3,omitempty"`
}
// ResourceClusterLibraryCran models the cran block of a cluster library.
type ResourceClusterLibraryCran struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}
// ResourceClusterLibraryMaven models the maven block of a cluster library.
type ResourceClusterLibraryMaven struct {
Coordinates string `json:"coordinates"`
Exclusions []string `json:"exclusions,omitempty"`
Repo string `json:"repo,omitempty"`
}
// ResourceClusterLibraryPypi models the pypi block of a cluster library.
type ResourceClusterLibraryPypi struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}
// ResourceClusterLibrary models one library block of the databricks_cluster resource.
type ResourceClusterLibrary struct {
Egg string `json:"egg,omitempty"`
Jar string `json:"jar,omitempty"`
Whl string `json:"whl,omitempty"`
Cran *ResourceClusterLibraryCran `json:"cran,omitempty"`
Maven *ResourceClusterLibraryMaven `json:"maven,omitempty"`
Pypi *ResourceClusterLibraryPypi `json:"pypi,omitempty"`
}
// ResourceClusterWorkloadTypeClients models the clients block of workload_type.
type ResourceClusterWorkloadTypeClients struct {
Jobs bool `json:"jobs,omitempty"`
Notebooks bool `json:"notebooks,omitempty"`
}
// ResourceClusterWorkloadType models the workload_type block of the databricks_cluster resource.
type ResourceClusterWorkloadType struct {
Clients *ResourceClusterWorkloadTypeClients `json:"clients,omitempty"`
}
// ResourceCluster is the generated schema of the databricks_cluster resource.
// SparkVersion is the only required attribute (no omitempty).
type ResourceCluster struct {
ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"`
AutoterminationMinutes int `json:"autotermination_minutes,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DataSecurityMode string `json:"data_security_mode,omitempty"`
DefaultTags map[string]string `json:"default_tags,omitempty"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
Id string `json:"id,omitempty"`
IdempotencyToken string `json:"idempotency_token,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
IsPinned bool `json:"is_pinned,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers,omitempty"`
PolicyId string `json:"policy_id,omitempty"`
RuntimeEngine string `json:"runtime_engine,omitempty"`
SingleUserName string `json:"single_user_name,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SparkVersion string `json:"spark_version"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
State string `json:"state,omitempty"`
Url string `json:"url,omitempty"`
Autoscale *ResourceClusterAutoscale `json:"autoscale,omitempty"`
AwsAttributes *ResourceClusterAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *ResourceClusterAzureAttributes `json:"azure_attributes,omitempty"`
ClusterLogConf *ResourceClusterClusterLogConf `json:"cluster_log_conf,omitempty"`
DockerImage *ResourceClusterDockerImage `json:"docker_image,omitempty"`
GcpAttributes *ResourceClusterGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []ResourceClusterInitScripts `json:"init_scripts,omitempty"`
Library []ResourceClusterLibrary `json:"library,omitempty"`
WorkloadType *ResourceClusterWorkloadType `json:"workload_type,omitempty"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceClusterPolicy is the generated schema of the databricks_cluster_policy resource.
type ResourceClusterPolicy struct {
Definition string `json:"definition,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name"`
PolicyId string `json:"policy_id,omitempty"`
}

View File

@ -0,0 +1,13 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceDbfsFile is the generated schema of the databricks_dbfs_file resource.
type ResourceDbfsFile struct {
ContentBase64 string `json:"content_base64,omitempty"`
DbfsPath string `json:"dbfs_path,omitempty"`
FileSize int `json:"file_size,omitempty"`
Id string `json:"id,omitempty"`
Md5 string `json:"md5,omitempty"`
Path string `json:"path"`
Source string `json:"source,omitempty"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceDirectory is the generated schema of the databricks_directory resource.
type ResourceDirectory struct {
DeleteRecursive bool `json:"delete_recursive,omitempty"`
Id string `json:"id,omitempty"`
ObjectId int `json:"object_id,omitempty"`
Path string `json:"path"`
}

View File

@ -0,0 +1,14 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceEntitlements is the generated schema of the databricks_entitlements resource.
type ResourceEntitlements struct {
AllowClusterCreate bool `json:"allow_cluster_create,omitempty"`
AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"`
GroupId string `json:"group_id,omitempty"`
Id string `json:"id,omitempty"`
ServicePrincipalId string `json:"service_principal_id,omitempty"`
UserId string `json:"user_id,omitempty"`
WorkspaceAccess bool `json:"workspace_access,omitempty"`
}

View File

@ -0,0 +1,14 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceExternalLocation is the generated schema of the databricks_external_location resource.
type ResourceExternalLocation struct {
Comment string `json:"comment,omitempty"`
CredentialName string `json:"credential_name"`
Id string `json:"id,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
Name string `json:"name"`
Owner string `json:"owner,omitempty"`
SkipValidation bool `json:"skip_validation,omitempty"`
Url string `json:"url"`
}

View File

@ -0,0 +1,11 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceGitCredential is the generated schema of the databricks_git_credential resource.
type ResourceGitCredential struct {
Force bool `json:"force,omitempty"`
GitProvider string `json:"git_provider"`
GitUsername string `json:"git_username"`
Id string `json:"id,omitempty"`
PersonalAccessToken string `json:"personal_access_token"`
}

View File

@ -0,0 +1,13 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceGlobalInitScript is the generated schema of the databricks_global_init_script resource.
type ResourceGlobalInitScript struct {
ContentBase64 string `json:"content_base64,omitempty"`
Enabled bool `json:"enabled,omitempty"`
Id string `json:"id,omitempty"`
Md5 string `json:"md5,omitempty"`
Name string `json:"name"`
Position int `json:"position,omitempty"`
Source string `json:"source,omitempty"`
}

View File

@ -0,0 +1,23 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceGrantsGrant models one grant block: a principal and its privileges.
type ResourceGrantsGrant struct {
Principal string `json:"principal"`
Privileges []string `json:"privileges"`
}
// ResourceGrants is the generated schema of the databricks_grants resource.
// The securable-object fields (Catalog, Schema, Table, ...) are all optional;
// which one is set selects the object the grants apply to.
type ResourceGrants struct {
Catalog string `json:"catalog,omitempty"`
ExternalLocation string `json:"external_location,omitempty"`
Function string `json:"function,omitempty"`
Id string `json:"id,omitempty"`
MaterializedView string `json:"materialized_view,omitempty"`
Metastore string `json:"metastore,omitempty"`
Schema string `json:"schema,omitempty"`
Share string `json:"share,omitempty"`
StorageCredential string `json:"storage_credential,omitempty"`
Table string `json:"table,omitempty"`
View string `json:"view,omitempty"`
Grant []ResourceGrantsGrant `json:"grant,omitempty"`
}

View File

@ -0,0 +1,15 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceGroup is the generated schema of the databricks_group resource.
type ResourceGroup struct {
AllowClusterCreate bool `json:"allow_cluster_create,omitempty"`
AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"`
DisplayName string `json:"display_name"`
ExternalId string `json:"external_id,omitempty"`
Force bool `json:"force,omitempty"`
Id string `json:"id,omitempty"`
Url string `json:"url,omitempty"`
WorkspaceAccess bool `json:"workspace_access,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceGroupInstanceProfile is the generated schema of the databricks_group_instance_profile resource.
type ResourceGroupInstanceProfile struct {
GroupId string `json:"group_id"`
Id string `json:"id,omitempty"`
InstanceProfileId string `json:"instance_profile_id"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceGroupMember is the generated schema of the databricks_group_member resource.
type ResourceGroupMember struct {
GroupId string `json:"group_id"`
Id string `json:"id,omitempty"`
MemberId string `json:"member_id"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceGroupRole is the generated schema of the databricks_group_role resource.
type ResourceGroupRole struct {
GroupId string `json:"group_id"`
Id string `json:"id,omitempty"`
Role string `json:"role"`
}

View File

@ -0,0 +1,79 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceInstancePoolAwsAttributes models the aws_attributes block of the databricks_instance_pool resource.
type ResourceInstancePoolAwsAttributes struct {
Availability string `json:"availability,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceInstancePoolAzureAttributes models the azure_attributes block of the databricks_instance_pool resource.
type ResourceInstancePoolAzureAttributes struct {
Availability string `json:"availability,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}
// ResourceInstancePoolDiskSpecDiskType models the disk_type block of disk_spec.
type ResourceInstancePoolDiskSpecDiskType struct {
AzureDiskVolumeType string `json:"azure_disk_volume_type,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
}
// ResourceInstancePoolDiskSpec models the disk_spec block of the databricks_instance_pool resource.
type ResourceInstancePoolDiskSpec struct {
DiskCount int `json:"disk_count,omitempty"`
DiskSize int `json:"disk_size,omitempty"`
DiskType *ResourceInstancePoolDiskSpecDiskType `json:"disk_type,omitempty"`
}
// ResourceInstancePoolGcpAttributes models the gcp_attributes block of the databricks_instance_pool resource.
type ResourceInstancePoolGcpAttributes struct {
GcpAvailability string `json:"gcp_availability,omitempty"`
}
// ResourceInstancePoolInstancePoolFleetAttributesFleetOnDemandOption models the fleet_on_demand_option block.
type ResourceInstancePoolInstancePoolFleetAttributesFleetOnDemandOption struct {
AllocationStrategy string `json:"allocation_strategy"`
InstancePoolsToUseCount int `json:"instance_pools_to_use_count,omitempty"`
}
// ResourceInstancePoolInstancePoolFleetAttributesFleetSpotOption models the fleet_spot_option block.
type ResourceInstancePoolInstancePoolFleetAttributesFleetSpotOption struct {
AllocationStrategy string `json:"allocation_strategy"`
InstancePoolsToUseCount int `json:"instance_pools_to_use_count,omitempty"`
}
// ResourceInstancePoolInstancePoolFleetAttributesLaunchTemplateOverride models one launch_template_override entry.
type ResourceInstancePoolInstancePoolFleetAttributesLaunchTemplateOverride struct {
AvailabilityZone string `json:"availability_zone"`
InstanceType string `json:"instance_type"`
}
// ResourceInstancePoolInstancePoolFleetAttributes models the instance_pool_fleet_attributes block.
type ResourceInstancePoolInstancePoolFleetAttributes struct {
FleetOnDemandOption *ResourceInstancePoolInstancePoolFleetAttributesFleetOnDemandOption `json:"fleet_on_demand_option,omitempty"`
FleetSpotOption *ResourceInstancePoolInstancePoolFleetAttributesFleetSpotOption `json:"fleet_spot_option,omitempty"`
LaunchTemplateOverride []ResourceInstancePoolInstancePoolFleetAttributesLaunchTemplateOverride `json:"launch_template_override,omitempty"`
}
// ResourceInstancePoolPreloadedDockerImageBasicAuth models the basic_auth block of preloaded_docker_image.
type ResourceInstancePoolPreloadedDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}
// ResourceInstancePoolPreloadedDockerImage models one preloaded_docker_image block.
type ResourceInstancePoolPreloadedDockerImage struct {
Url string `json:"url"`
BasicAuth *ResourceInstancePoolPreloadedDockerImageBasicAuth `json:"basic_auth,omitempty"`
}
type ResourceInstancePool struct {
CustomTags map[string]string `json:"custom_tags,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
Id string `json:"id,omitempty"`
IdleInstanceAutoterminationMinutes int `json:"idle_instance_autotermination_minutes"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
InstancePoolName string `json:"instance_pool_name"`
MaxCapacity int `json:"max_capacity,omitempty"`
MinIdleInstances int `json:"min_idle_instances,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
PreloadedSparkVersions []string `json:"preloaded_spark_versions,omitempty"`
AwsAttributes *ResourceInstancePoolAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *ResourceInstancePoolAzureAttributes `json:"azure_attributes,omitempty"`
DiskSpec *ResourceInstancePoolDiskSpec `json:"disk_spec,omitempty"`
GcpAttributes *ResourceInstancePoolGcpAttributes `json:"gcp_attributes,omitempty"`
InstancePoolFleetAttributes *ResourceInstancePoolInstancePoolFleetAttributes `json:"instance_pool_fleet_attributes,omitempty"`
PreloadedDockerImage []ResourceInstancePoolPreloadedDockerImage `json:"preloaded_docker_image,omitempty"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceInstanceProfile maps the databricks_instance_profile Terraform
// resource schema (registration of an AWS instance profile).
type ResourceInstanceProfile struct {
Id string `json:"id,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
IsMetaInstanceProfile bool `json:"is_meta_instance_profile,omitempty"`
SkipValidation bool `json:"skip_validation,omitempty"`
}

View File

@ -0,0 +1,11 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceIpAccessList maps the databricks_ip_access_list Terraform resource
// schema.
type ResourceIpAccessList struct {
Enabled bool `json:"enabled,omitempty"`
Id string `json:"id,omitempty"`
IpAddresses []string `json:"ip_addresses"`
Label string `json:"label"`
ListType string `json:"list_type"`
}

View File

@ -0,0 +1,648 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceJobDbtTask is the dbt_task block of ResourceJob.
type ResourceJobDbtTask struct {
Commands []string `json:"commands"`
ProfilesDirectory string `json:"profiles_directory,omitempty"`
ProjectDirectory string `json:"project_directory,omitempty"`
Schema string `json:"schema,omitempty"`
WarehouseId string `json:"warehouse_id,omitempty"`
}
// ResourceJobEmailNotifications is the email_notifications block of ResourceJob.
type ResourceJobEmailNotifications struct {
AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"`
OnSuccess []string `json:"on_success,omitempty"`
}
// ResourceJobGitSource is the git_source block of ResourceJob.
type ResourceJobGitSource struct {
Branch string `json:"branch,omitempty"`
Commit string `json:"commit,omitempty"`
Provider string `json:"provider,omitempty"`
Tag string `json:"tag,omitempty"`
Url string `json:"url"`
}
// ResourceJobJobClusterNewClusterAutoscale is the autoscale block of
// ResourceJobJobClusterNewCluster.
type ResourceJobJobClusterNewClusterAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
}
// ResourceJobJobClusterNewClusterAwsAttributes is the aws_attributes block of
// ResourceJobJobClusterNewCluster.
type ResourceJobJobClusterNewClusterAwsAttributes struct {
Availability string `json:"availability,omitempty"`
EbsVolumeCount int `json:"ebs_volume_count,omitempty"`
EbsVolumeSize int `json:"ebs_volume_size,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceJobJobClusterNewClusterAzureAttributes is the azure_attributes block
// of ResourceJobJobClusterNewCluster.
type ResourceJobJobClusterNewClusterAzureAttributes struct {
Availability string `json:"availability,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}
// ResourceJobJobClusterNewClusterClusterLogConfDbfs is the dbfs block of
// ResourceJobJobClusterNewClusterClusterLogConf.
type ResourceJobJobClusterNewClusterClusterLogConfDbfs struct {
Destination string `json:"destination"`
}
// ResourceJobJobClusterNewClusterClusterLogConfS3 is the s3 block of
// ResourceJobJobClusterNewClusterClusterLogConf.
type ResourceJobJobClusterNewClusterClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourceJobJobClusterNewClusterClusterLogConf is the cluster_log_conf block
// of ResourceJobJobClusterNewCluster.
type ResourceJobJobClusterNewClusterClusterLogConf struct {
Dbfs *ResourceJobJobClusterNewClusterClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *ResourceJobJobClusterNewClusterClusterLogConfS3 `json:"s3,omitempty"`
}
// ResourceJobJobClusterNewClusterDockerImageBasicAuth is the basic_auth block
// of ResourceJobJobClusterNewClusterDockerImage.
type ResourceJobJobClusterNewClusterDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}
// ResourceJobJobClusterNewClusterDockerImage is the docker_image block of
// ResourceJobJobClusterNewCluster.
type ResourceJobJobClusterNewClusterDockerImage struct {
Url string `json:"url"`
BasicAuth *ResourceJobJobClusterNewClusterDockerImageBasicAuth `json:"basic_auth,omitempty"`
}
// ResourceJobJobClusterNewClusterGcpAttributes is the gcp_attributes block of
// ResourceJobJobClusterNewCluster.
type ResourceJobJobClusterNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceJobJobClusterNewClusterInitScriptsDbfs is the dbfs block of
// ResourceJobJobClusterNewClusterInitScripts.
type ResourceJobJobClusterNewClusterInitScriptsDbfs struct {
Destination string `json:"destination"`
}
// ResourceJobJobClusterNewClusterInitScriptsFile is the file block of
// ResourceJobJobClusterNewClusterInitScripts.
type ResourceJobJobClusterNewClusterInitScriptsFile struct {
Destination string `json:"destination,omitempty"`
}
// ResourceJobJobClusterNewClusterInitScriptsGcs is the gcs block of
// ResourceJobJobClusterNewClusterInitScripts.
type ResourceJobJobClusterNewClusterInitScriptsGcs struct {
Destination string `json:"destination,omitempty"`
}
// ResourceJobJobClusterNewClusterInitScriptsS3 is the s3 block of
// ResourceJobJobClusterNewClusterInitScripts.
type ResourceJobJobClusterNewClusterInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourceJobJobClusterNewClusterInitScripts is one init_scripts entry of
// ResourceJobJobClusterNewCluster; exactly one destination sub-block is
// expected to be set.
type ResourceJobJobClusterNewClusterInitScripts struct {
Dbfs *ResourceJobJobClusterNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobJobClusterNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobJobClusterNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobJobClusterNewClusterInitScriptsS3 `json:"s3,omitempty"`
}
// ResourceJobJobClusterNewClusterWorkloadTypeClients is the clients block of
// ResourceJobJobClusterNewClusterWorkloadType.
type ResourceJobJobClusterNewClusterWorkloadTypeClients struct {
Jobs bool `json:"jobs,omitempty"`
Notebooks bool `json:"notebooks,omitempty"`
}
// ResourceJobJobClusterNewClusterWorkloadType is the workload_type block of
// ResourceJobJobClusterNewCluster.
type ResourceJobJobClusterNewClusterWorkloadType struct {
Clients *ResourceJobJobClusterNewClusterWorkloadTypeClients `json:"clients,omitempty"`
}
// ResourceJobJobClusterNewCluster is the new_cluster block of
// ResourceJobJobCluster (cluster definition for a shared job cluster).
type ResourceJobJobClusterNewCluster struct {
ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"`
AutoterminationMinutes int `json:"autotermination_minutes,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DataSecurityMode string `json:"data_security_mode,omitempty"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
IdempotencyToken string `json:"idempotency_token,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers,omitempty"`
PolicyId string `json:"policy_id,omitempty"`
RuntimeEngine string `json:"runtime_engine,omitempty"`
SingleUserName string `json:"single_user_name,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SparkVersion string `json:"spark_version"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
Autoscale *ResourceJobJobClusterNewClusterAutoscale `json:"autoscale,omitempty"`
AwsAttributes *ResourceJobJobClusterNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *ResourceJobJobClusterNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
ClusterLogConf *ResourceJobJobClusterNewClusterClusterLogConf `json:"cluster_log_conf,omitempty"`
DockerImage *ResourceJobJobClusterNewClusterDockerImage `json:"docker_image,omitempty"`
GcpAttributes *ResourceJobJobClusterNewClusterGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []ResourceJobJobClusterNewClusterInitScripts `json:"init_scripts,omitempty"`
WorkloadType *ResourceJobJobClusterNewClusterWorkloadType `json:"workload_type,omitempty"`
}
// ResourceJobJobCluster is one job_cluster entry of ResourceJob.
type ResourceJobJobCluster struct {
JobClusterKey string `json:"job_cluster_key,omitempty"`
NewCluster *ResourceJobJobClusterNewCluster `json:"new_cluster,omitempty"`
}
// ResourceJobLibraryCran is the cran block of ResourceJobLibrary.
type ResourceJobLibraryCran struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}
// ResourceJobLibraryMaven is the maven block of ResourceJobLibrary.
type ResourceJobLibraryMaven struct {
Coordinates string `json:"coordinates"`
Exclusions []string `json:"exclusions,omitempty"`
Repo string `json:"repo,omitempty"`
}
// ResourceJobLibraryPypi is the pypi block of ResourceJobLibrary.
type ResourceJobLibraryPypi struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}
// ResourceJobLibrary is one library entry of ResourceJob.
type ResourceJobLibrary struct {
Egg string `json:"egg,omitempty"`
Jar string `json:"jar,omitempty"`
Whl string `json:"whl,omitempty"`
Cran *ResourceJobLibraryCran `json:"cran,omitempty"`
Maven *ResourceJobLibraryMaven `json:"maven,omitempty"`
Pypi *ResourceJobLibraryPypi `json:"pypi,omitempty"`
}
// ResourceJobNewClusterAutoscale is the autoscale block of ResourceJobNewCluster.
type ResourceJobNewClusterAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
}
// ResourceJobNewClusterAwsAttributes is the aws_attributes block of
// ResourceJobNewCluster.
type ResourceJobNewClusterAwsAttributes struct {
Availability string `json:"availability,omitempty"`
EbsVolumeCount int `json:"ebs_volume_count,omitempty"`
EbsVolumeSize int `json:"ebs_volume_size,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceJobNewClusterAzureAttributes is the azure_attributes block of
// ResourceJobNewCluster.
type ResourceJobNewClusterAzureAttributes struct {
Availability string `json:"availability,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}
// ResourceJobNewClusterClusterLogConfDbfs is the dbfs block of
// ResourceJobNewClusterClusterLogConf.
type ResourceJobNewClusterClusterLogConfDbfs struct {
Destination string `json:"destination"`
}
// ResourceJobNewClusterClusterLogConfS3 is the s3 block of
// ResourceJobNewClusterClusterLogConf.
type ResourceJobNewClusterClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourceJobNewClusterClusterLogConf is the cluster_log_conf block of
// ResourceJobNewCluster.
type ResourceJobNewClusterClusterLogConf struct {
Dbfs *ResourceJobNewClusterClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *ResourceJobNewClusterClusterLogConfS3 `json:"s3,omitempty"`
}
// ResourceJobNewClusterDockerImageBasicAuth is the basic_auth block of
// ResourceJobNewClusterDockerImage.
type ResourceJobNewClusterDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}
// ResourceJobNewClusterDockerImage is the docker_image block of
// ResourceJobNewCluster.
type ResourceJobNewClusterDockerImage struct {
Url string `json:"url"`
BasicAuth *ResourceJobNewClusterDockerImageBasicAuth `json:"basic_auth,omitempty"`
}
// ResourceJobNewClusterGcpAttributes is the gcp_attributes block of
// ResourceJobNewCluster.
type ResourceJobNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceJobNewClusterInitScriptsDbfs is the dbfs block of
// ResourceJobNewClusterInitScripts.
type ResourceJobNewClusterInitScriptsDbfs struct {
Destination string `json:"destination"`
}
// ResourceJobNewClusterInitScriptsFile is the file block of
// ResourceJobNewClusterInitScripts.
type ResourceJobNewClusterInitScriptsFile struct {
Destination string `json:"destination,omitempty"`
}
// ResourceJobNewClusterInitScriptsGcs is the gcs block of
// ResourceJobNewClusterInitScripts.
type ResourceJobNewClusterInitScriptsGcs struct {
Destination string `json:"destination,omitempty"`
}
// ResourceJobNewClusterInitScriptsS3 is the s3 block of
// ResourceJobNewClusterInitScripts.
type ResourceJobNewClusterInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourceJobNewClusterInitScripts is one init_scripts entry of
// ResourceJobNewCluster.
type ResourceJobNewClusterInitScripts struct {
Dbfs *ResourceJobNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobNewClusterInitScriptsS3 `json:"s3,omitempty"`
}
// ResourceJobNewClusterWorkloadTypeClients is the clients block of
// ResourceJobNewClusterWorkloadType.
type ResourceJobNewClusterWorkloadTypeClients struct {
Jobs bool `json:"jobs,omitempty"`
Notebooks bool `json:"notebooks,omitempty"`
}
// ResourceJobNewClusterWorkloadType is the workload_type block of
// ResourceJobNewCluster.
type ResourceJobNewClusterWorkloadType struct {
Clients *ResourceJobNewClusterWorkloadTypeClients `json:"clients,omitempty"`
}
// ResourceJobNewCluster is the job-level new_cluster block of ResourceJob.
type ResourceJobNewCluster struct {
ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"`
AutoterminationMinutes int `json:"autotermination_minutes,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DataSecurityMode string `json:"data_security_mode,omitempty"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
IdempotencyToken string `json:"idempotency_token,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers,omitempty"`
PolicyId string `json:"policy_id,omitempty"`
RuntimeEngine string `json:"runtime_engine,omitempty"`
SingleUserName string `json:"single_user_name,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SparkVersion string `json:"spark_version"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
Autoscale *ResourceJobNewClusterAutoscale `json:"autoscale,omitempty"`
AwsAttributes *ResourceJobNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *ResourceJobNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
ClusterLogConf *ResourceJobNewClusterClusterLogConf `json:"cluster_log_conf,omitempty"`
DockerImage *ResourceJobNewClusterDockerImage `json:"docker_image,omitempty"`
GcpAttributes *ResourceJobNewClusterGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []ResourceJobNewClusterInitScripts `json:"init_scripts,omitempty"`
WorkloadType *ResourceJobNewClusterWorkloadType `json:"workload_type,omitempty"`
}
// ResourceJobNotebookTask is the notebook_task block of ResourceJob.
type ResourceJobNotebookTask struct {
BaseParameters map[string]string `json:"base_parameters,omitempty"`
NotebookPath string `json:"notebook_path"`
}
// ResourceJobPipelineTask is the pipeline_task block of ResourceJob.
type ResourceJobPipelineTask struct {
PipelineId string `json:"pipeline_id"`
}
// ResourceJobPythonWheelTask is the python_wheel_task block of ResourceJob.
type ResourceJobPythonWheelTask struct {
EntryPoint string `json:"entry_point,omitempty"`
NamedParameters map[string]string `json:"named_parameters,omitempty"`
PackageName string `json:"package_name,omitempty"`
Parameters []string `json:"parameters,omitempty"`
}
// ResourceJobSchedule is the schedule block of ResourceJob.
type ResourceJobSchedule struct {
PauseStatus string `json:"pause_status,omitempty"`
QuartzCronExpression string `json:"quartz_cron_expression"`
TimezoneId string `json:"timezone_id"`
}
// ResourceJobSparkJarTask is the spark_jar_task block of ResourceJob.
type ResourceJobSparkJarTask struct {
JarUri string `json:"jar_uri,omitempty"`
MainClassName string `json:"main_class_name,omitempty"`
Parameters []string `json:"parameters,omitempty"`
}
// ResourceJobSparkPythonTask is the spark_python_task block of ResourceJob.
type ResourceJobSparkPythonTask struct {
Parameters []string `json:"parameters,omitempty"`
PythonFile string `json:"python_file"`
}
// ResourceJobSparkSubmitTask is the spark_submit_task block of ResourceJob.
type ResourceJobSparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}
// ResourceJobTaskDbtTask is the dbt_task block of ResourceJobTask.
type ResourceJobTaskDbtTask struct {
Commands []string `json:"commands"`
ProfilesDirectory string `json:"profiles_directory,omitempty"`
ProjectDirectory string `json:"project_directory,omitempty"`
Schema string `json:"schema,omitempty"`
WarehouseId string `json:"warehouse_id,omitempty"`
}
// ResourceJobTaskDependsOn is one depends_on entry of ResourceJobTask.
type ResourceJobTaskDependsOn struct {
TaskKey string `json:"task_key,omitempty"`
}
// ResourceJobTaskEmailNotifications is the email_notifications block of
// ResourceJobTask.
type ResourceJobTaskEmailNotifications struct {
AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
OnFailure []string `json:"on_failure,omitempty"`
OnStart []string `json:"on_start,omitempty"`
OnSuccess []string `json:"on_success,omitempty"`
}
// ResourceJobTaskLibraryCran is the cran block of ResourceJobTaskLibrary.
type ResourceJobTaskLibraryCran struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}
// ResourceJobTaskLibraryMaven is the maven block of ResourceJobTaskLibrary.
type ResourceJobTaskLibraryMaven struct {
Coordinates string `json:"coordinates"`
Exclusions []string `json:"exclusions,omitempty"`
Repo string `json:"repo,omitempty"`
}
// ResourceJobTaskLibraryPypi is the pypi block of ResourceJobTaskLibrary.
type ResourceJobTaskLibraryPypi struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}
// ResourceJobTaskLibrary is one library entry of ResourceJobTask.
type ResourceJobTaskLibrary struct {
Egg string `json:"egg,omitempty"`
Jar string `json:"jar,omitempty"`
Whl string `json:"whl,omitempty"`
Cran *ResourceJobTaskLibraryCran `json:"cran,omitempty"`
Maven *ResourceJobTaskLibraryMaven `json:"maven,omitempty"`
Pypi *ResourceJobTaskLibraryPypi `json:"pypi,omitempty"`
}
// ResourceJobTaskNewClusterAutoscale is the autoscale block of
// ResourceJobTaskNewCluster.
type ResourceJobTaskNewClusterAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
}
// ResourceJobTaskNewClusterAwsAttributes is the aws_attributes block of
// ResourceJobTaskNewCluster.
type ResourceJobTaskNewClusterAwsAttributes struct {
Availability string `json:"availability,omitempty"`
EbsVolumeCount int `json:"ebs_volume_count,omitempty"`
EbsVolumeSize int `json:"ebs_volume_size,omitempty"`
EbsVolumeType string `json:"ebs_volume_type,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
SpotBidPricePercent int `json:"spot_bid_price_percent,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceJobTaskNewClusterAzureAttributes is the azure_attributes block of
// ResourceJobTaskNewCluster.
type ResourceJobTaskNewClusterAzureAttributes struct {
Availability string `json:"availability,omitempty"`
FirstOnDemand int `json:"first_on_demand,omitempty"`
SpotBidMaxPrice int `json:"spot_bid_max_price,omitempty"`
}
// ResourceJobTaskNewClusterClusterLogConfDbfs is the dbfs block of
// ResourceJobTaskNewClusterClusterLogConf.
type ResourceJobTaskNewClusterClusterLogConfDbfs struct {
Destination string `json:"destination"`
}
// ResourceJobTaskNewClusterClusterLogConfS3 is the s3 block of
// ResourceJobTaskNewClusterClusterLogConf.
type ResourceJobTaskNewClusterClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourceJobTaskNewClusterClusterLogConf is the cluster_log_conf block of
// ResourceJobTaskNewCluster.
type ResourceJobTaskNewClusterClusterLogConf struct {
Dbfs *ResourceJobTaskNewClusterClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *ResourceJobTaskNewClusterClusterLogConfS3 `json:"s3,omitempty"`
}
// ResourceJobTaskNewClusterDockerImageBasicAuth is the basic_auth block of
// ResourceJobTaskNewClusterDockerImage.
type ResourceJobTaskNewClusterDockerImageBasicAuth struct {
Password string `json:"password"`
Username string `json:"username"`
}
// ResourceJobTaskNewClusterDockerImage is the docker_image block of
// ResourceJobTaskNewCluster.
type ResourceJobTaskNewClusterDockerImage struct {
Url string `json:"url"`
BasicAuth *ResourceJobTaskNewClusterDockerImageBasicAuth `json:"basic_auth,omitempty"`
}
// ResourceJobTaskNewClusterGcpAttributes is the gcp_attributes block of
// ResourceJobTaskNewCluster.
type ResourceJobTaskNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourceJobTaskNewClusterInitScriptsDbfs is the dbfs block of
// ResourceJobTaskNewClusterInitScripts.
type ResourceJobTaskNewClusterInitScriptsDbfs struct {
Destination string `json:"destination"`
}
// ResourceJobTaskNewClusterInitScriptsFile is the file block of
// ResourceJobTaskNewClusterInitScripts.
type ResourceJobTaskNewClusterInitScriptsFile struct {
Destination string `json:"destination,omitempty"`
}
// ResourceJobTaskNewClusterInitScriptsGcs is the gcs block of
// ResourceJobTaskNewClusterInitScripts.
type ResourceJobTaskNewClusterInitScriptsGcs struct {
Destination string `json:"destination,omitempty"`
}
// ResourceJobTaskNewClusterInitScriptsS3 is the s3 block of
// ResourceJobTaskNewClusterInitScripts.
type ResourceJobTaskNewClusterInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourceJobTaskNewClusterInitScripts is one init_scripts entry of
// ResourceJobTaskNewCluster.
type ResourceJobTaskNewClusterInitScripts struct {
Dbfs *ResourceJobTaskNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobTaskNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobTaskNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobTaskNewClusterInitScriptsS3 `json:"s3,omitempty"`
}
// ResourceJobTaskNewClusterWorkloadTypeClients is the clients block of
// ResourceJobTaskNewClusterWorkloadType.
type ResourceJobTaskNewClusterWorkloadTypeClients struct {
Jobs bool `json:"jobs,omitempty"`
Notebooks bool `json:"notebooks,omitempty"`
}
// ResourceJobTaskNewClusterWorkloadType is the workload_type block of
// ResourceJobTaskNewCluster.
type ResourceJobTaskNewClusterWorkloadType struct {
Clients *ResourceJobTaskNewClusterWorkloadTypeClients `json:"clients,omitempty"`
}
// ResourceJobTaskNewCluster is the new_cluster block of ResourceJobTask.
type ResourceJobTaskNewCluster struct {
ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"`
AutoterminationMinutes int `json:"autotermination_minutes,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterName string `json:"cluster_name,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DataSecurityMode string `json:"data_security_mode,omitempty"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
IdempotencyToken string `json:"idempotency_token,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers,omitempty"`
PolicyId string `json:"policy_id,omitempty"`
RuntimeEngine string `json:"runtime_engine,omitempty"`
SingleUserName string `json:"single_user_name,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SparkVersion string `json:"spark_version"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
Autoscale *ResourceJobTaskNewClusterAutoscale `json:"autoscale,omitempty"`
AwsAttributes *ResourceJobTaskNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
AzureAttributes *ResourceJobTaskNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
ClusterLogConf *ResourceJobTaskNewClusterClusterLogConf `json:"cluster_log_conf,omitempty"`
DockerImage *ResourceJobTaskNewClusterDockerImage `json:"docker_image,omitempty"`
GcpAttributes *ResourceJobTaskNewClusterGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []ResourceJobTaskNewClusterInitScripts `json:"init_scripts,omitempty"`
WorkloadType *ResourceJobTaskNewClusterWorkloadType `json:"workload_type,omitempty"`
}
// ResourceJobTaskNotebookTask is the notebook_task block of ResourceJobTask.
type ResourceJobTaskNotebookTask struct {
BaseParameters map[string]string `json:"base_parameters,omitempty"`
NotebookPath string `json:"notebook_path"`
}
// ResourceJobTaskPipelineTask is the pipeline_task block of ResourceJobTask.
type ResourceJobTaskPipelineTask struct {
PipelineId string `json:"pipeline_id"`
}
// ResourceJobTaskPythonWheelTask is the python_wheel_task block of ResourceJobTask.
type ResourceJobTaskPythonWheelTask struct {
EntryPoint string `json:"entry_point,omitempty"`
NamedParameters map[string]string `json:"named_parameters,omitempty"`
PackageName string `json:"package_name,omitempty"`
Parameters []string `json:"parameters,omitempty"`
}
// ResourceJobTaskSparkJarTask is the spark_jar_task block of ResourceJobTask.
type ResourceJobTaskSparkJarTask struct {
JarUri string `json:"jar_uri,omitempty"`
MainClassName string `json:"main_class_name,omitempty"`
Parameters []string `json:"parameters,omitempty"`
}
// ResourceJobTaskSparkPythonTask is the spark_python_task block of ResourceJobTask.
type ResourceJobTaskSparkPythonTask struct {
Parameters []string `json:"parameters,omitempty"`
PythonFile string `json:"python_file"`
}
// ResourceJobTaskSparkSubmitTask is the spark_submit_task block of ResourceJobTask.
type ResourceJobTaskSparkSubmitTask struct {
Parameters []string `json:"parameters,omitempty"`
}
// ResourceJobTaskSqlTaskAlert is the alert block of ResourceJobTaskSqlTask.
type ResourceJobTaskSqlTaskAlert struct {
AlertId string `json:"alert_id"`
}
// ResourceJobTaskSqlTaskDashboard is the dashboard block of ResourceJobTaskSqlTask.
type ResourceJobTaskSqlTaskDashboard struct {
DashboardId string `json:"dashboard_id"`
}
// ResourceJobTaskSqlTaskQuery is the query block of ResourceJobTaskSqlTask.
type ResourceJobTaskSqlTaskQuery struct {
QueryId string `json:"query_id"`
}
// ResourceJobTaskSqlTask is the sql_task block of ResourceJobTask.
type ResourceJobTaskSqlTask struct {
Parameters map[string]string `json:"parameters,omitempty"`
WarehouseId string `json:"warehouse_id,omitempty"`
Alert *ResourceJobTaskSqlTaskAlert `json:"alert,omitempty"`
Dashboard *ResourceJobTaskSqlTaskDashboard `json:"dashboard,omitempty"`
Query *ResourceJobTaskSqlTaskQuery `json:"query,omitempty"`
}
// ResourceJobTask is one task entry of ResourceJob (multi-task jobs). Exactly
// one of the *Task sub-blocks is expected to be set per task.
type ResourceJobTask struct {
Description string `json:"description,omitempty"`
ExistingClusterId string `json:"existing_cluster_id,omitempty"`
JobClusterKey string `json:"job_cluster_key,omitempty"`
MaxRetries int `json:"max_retries,omitempty"`
MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
TaskKey string `json:"task_key,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"`
DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"`
EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"`
Library []ResourceJobTaskLibrary `json:"library,omitempty"`
NewCluster *ResourceJobTaskNewCluster `json:"new_cluster,omitempty"`
NotebookTask *ResourceJobTaskNotebookTask `json:"notebook_task,omitempty"`
PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"`
SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
SqlTask *ResourceJobTaskSqlTask `json:"sql_task,omitempty"`
}
// ResourceJobWebhookNotificationsOnFailure is one on_failure entry of
// ResourceJobWebhookNotifications.
type ResourceJobWebhookNotificationsOnFailure struct {
Id string `json:"id"`
}
// ResourceJobWebhookNotificationsOnStart is one on_start entry of
// ResourceJobWebhookNotifications.
type ResourceJobWebhookNotificationsOnStart struct {
Id string `json:"id"`
}
// ResourceJobWebhookNotificationsOnSuccess is one on_success entry of
// ResourceJobWebhookNotifications.
type ResourceJobWebhookNotificationsOnSuccess struct {
Id string `json:"id"`
}
// ResourceJobWebhookNotifications is the webhook_notifications block of ResourceJob.
type ResourceJobWebhookNotifications struct {
OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"`
OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
}
// ResourceJob maps the databricks_job Terraform resource schema. Legacy
// single-task fields (NewCluster, NotebookTask, ...) coexist with the
// multi-task Task slice; which combination is valid is enforced by the
// provider, not by this type.
type ResourceJob struct {
AlwaysRunning bool `json:"always_running,omitempty"`
ExistingClusterId string `json:"existing_cluster_id,omitempty"`
Format string `json:"format,omitempty"`
Id string `json:"id,omitempty"`
MaxConcurrentRuns int `json:"max_concurrent_runs,omitempty"`
MaxRetries int `json:"max_retries,omitempty"`
MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
Name string `json:"name,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
Tags map[string]string `json:"tags,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
Url string `json:"url,omitempty"`
DbtTask *ResourceJobDbtTask `json:"dbt_task,omitempty"`
EmailNotifications *ResourceJobEmailNotifications `json:"email_notifications,omitempty"`
GitSource *ResourceJobGitSource `json:"git_source,omitempty"`
JobCluster []ResourceJobJobCluster `json:"job_cluster,omitempty"`
Library []ResourceJobLibrary `json:"library,omitempty"`
NewCluster *ResourceJobNewCluster `json:"new_cluster,omitempty"`
NotebookTask *ResourceJobNotebookTask `json:"notebook_task,omitempty"`
PipelineTask *ResourceJobPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *ResourceJobPythonWheelTask `json:"python_wheel_task,omitempty"`
Schedule *ResourceJobSchedule `json:"schedule,omitempty"`
SparkJarTask *ResourceJobSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *ResourceJobSparkPythonTask `json:"spark_python_task,omitempty"`
SparkSubmitTask *ResourceJobSparkSubmitTask `json:"spark_submit_task,omitempty"`
Task []ResourceJobTask `json:"task,omitempty"`
WebhookNotifications *ResourceJobWebhookNotifications `json:"webhook_notifications,omitempty"`
}

View File

@ -0,0 +1,30 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceLibraryCran is the cran block of ResourceLibrary.
type ResourceLibraryCran struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}
// ResourceLibraryMaven is the maven block of ResourceLibrary.
type ResourceLibraryMaven struct {
Coordinates string `json:"coordinates"`
Exclusions []string `json:"exclusions,omitempty"`
Repo string `json:"repo,omitempty"`
}
// ResourceLibraryPypi is the pypi block of ResourceLibrary.
type ResourceLibraryPypi struct {
Package string `json:"package"`
Repo string `json:"repo,omitempty"`
}
// ResourceLibrary maps the databricks_library Terraform resource schema
// (a library installed on the cluster identified by ClusterId).
type ResourceLibrary struct {
ClusterId string `json:"cluster_id"`
Egg string `json:"egg,omitempty"`
Id string `json:"id,omitempty"`
Jar string `json:"jar,omitempty"`
Whl string `json:"whl,omitempty"`
Cran *ResourceLibraryCran `json:"cran,omitempty"`
Maven *ResourceLibraryMaven `json:"maven,omitempty"`
Pypi *ResourceLibraryPypi `json:"pypi,omitempty"`
}

View File

@ -0,0 +1,22 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMetastore maps the databricks_metastore Terraform resource schema
// (Unity Catalog metastore).
type ResourceMetastore struct {
Cloud string `json:"cloud,omitempty"`
CreatedAt int `json:"created_at,omitempty"`
CreatedBy string `json:"created_by,omitempty"`
DefaultDataAccessConfigId string `json:"default_data_access_config_id,omitempty"`
DeltaSharingOrganizationName string `json:"delta_sharing_organization_name,omitempty"`
DeltaSharingRecipientTokenLifetimeInSeconds int `json:"delta_sharing_recipient_token_lifetime_in_seconds,omitempty"`
DeltaSharingScope string `json:"delta_sharing_scope,omitempty"`
ForceDestroy bool `json:"force_destroy,omitempty"`
GlobalMetastoreId string `json:"global_metastore_id,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name"`
Owner string `json:"owner,omitempty"`
Region string `json:"region,omitempty"`
StorageRoot string `json:"storage_root"`
UpdatedAt int `json:"updated_at,omitempty"`
UpdatedBy string `json:"updated_by,omitempty"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMetastoreAssignment maps the databricks_metastore_assignment
// Terraform resource schema (binding of a metastore to a workspace).
type ResourceMetastoreAssignment struct {
DefaultCatalogName string `json:"default_catalog_name,omitempty"`
Id string `json:"id,omitempty"`
MetastoreId string `json:"metastore_id"`
WorkspaceId int `json:"workspace_id"`
}

View File

@ -0,0 +1,28 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMetastoreDataAccessAwsIamRole is the aws_iam_role credential block
// of ResourceMetastoreDataAccess.
type ResourceMetastoreDataAccessAwsIamRole struct {
RoleArn string `json:"role_arn"`
}
// ResourceMetastoreDataAccessAzureManagedIdentity is the
// azure_managed_identity credential block of ResourceMetastoreDataAccess.
type ResourceMetastoreDataAccessAzureManagedIdentity struct {
AccessConnectorId string `json:"access_connector_id"`
}
// ResourceMetastoreDataAccessAzureServicePrincipal is the
// azure_service_principal credential block of ResourceMetastoreDataAccess.
type ResourceMetastoreDataAccessAzureServicePrincipal struct {
ApplicationId string `json:"application_id"`
ClientSecret string `json:"client_secret"`
DirectoryId string `json:"directory_id"`
}
// ResourceMetastoreDataAccess maps the databricks_metastore_data_access
// Terraform resource schema; one of the cloud-specific credential blocks
// below is expected to be set.
type ResourceMetastoreDataAccess struct {
ConfigurationType string `json:"configuration_type,omitempty"`
Id string `json:"id,omitempty"`
IsDefault bool `json:"is_default,omitempty"`
MetastoreId string `json:"metastore_id"`
Name string `json:"name"`
AwsIamRole *ResourceMetastoreDataAccessAwsIamRole `json:"aws_iam_role,omitempty"`
AzureManagedIdentity *ResourceMetastoreDataAccessAzureManagedIdentity `json:"azure_managed_identity,omitempty"`
AzureServicePrincipal *ResourceMetastoreDataAccessAzureServicePrincipal `json:"azure_service_principal,omitempty"`
}

View File

@ -0,0 +1,14 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMlflowExperiment maps the "databricks_mlflow_experiment"
// Terraform resource schema.
type ResourceMlflowExperiment struct {
ArtifactLocation string `json:"artifact_location,omitempty"`
CreationTime int `json:"creation_time,omitempty"`
Description string `json:"description,omitempty"`
ExperimentId string `json:"experiment_id,omitempty"`
Id string `json:"id,omitempty"`
LastUpdateTime int `json:"last_update_time,omitempty"`
LifecycleStage string `json:"lifecycle_stage,omitempty"`
Name string `json:"name"`
}

View File

@ -0,0 +1,19 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMlflowModelTags models one element of the repeated "tags" block
// of ResourceMlflowModel.
type ResourceMlflowModelTags struct {
Key string `json:"key"`
Value string `json:"value"`
}
// ResourceMlflowModel maps the "databricks_mlflow_model" Terraform
// resource schema.
type ResourceMlflowModel struct {
CreationTimestamp int `json:"creation_timestamp,omitempty"`
Description string `json:"description,omitempty"`
Id string `json:"id,omitempty"`
LastUpdatedTimestamp int `json:"last_updated_timestamp,omitempty"`
Name string `json:"name"`
RegisteredModelId string `json:"registered_model_id,omitempty"`
UserId string `json:"user_id,omitempty"`
Tags []ResourceMlflowModelTags `json:"tags,omitempty"`
}

View File

@ -0,0 +1,26 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMlflowWebhookHttpUrlSpec models the nested "http_url_spec" block
// of ResourceMlflowWebhook.
type ResourceMlflowWebhookHttpUrlSpec struct {
Authorization string `json:"authorization,omitempty"`
EnableSslVerification bool `json:"enable_ssl_verification,omitempty"`
Secret string `json:"secret,omitempty"`
Url string `json:"url"`
}
// ResourceMlflowWebhookJobSpec models the nested "job_spec" block of
// ResourceMlflowWebhook.
type ResourceMlflowWebhookJobSpec struct {
AccessToken string `json:"access_token"`
JobId string `json:"job_id"`
WorkspaceUrl string `json:"workspace_url,omitempty"`
}
// ResourceMlflowWebhook maps the "databricks_mlflow_webhook" Terraform
// resource schema.
type ResourceMlflowWebhook struct {
Description string `json:"description,omitempty"`
Events []string `json:"events"`
Id string `json:"id,omitempty"`
ModelName string `json:"model_name,omitempty"`
Status string `json:"status,omitempty"`
HttpUrlSpec *ResourceMlflowWebhookHttpUrlSpec `json:"http_url_spec,omitempty"`
JobSpec *ResourceMlflowWebhookJobSpec `json:"job_spec,omitempty"`
}

View File

@ -0,0 +1,59 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMountAbfs models the nested "abfs" (Azure Blob Filesystem)
// block of ResourceMount.
type ResourceMountAbfs struct {
ClientId string `json:"client_id"`
ClientSecretKey string `json:"client_secret_key"`
ClientSecretScope string `json:"client_secret_scope"`
ContainerName string `json:"container_name,omitempty"`
Directory string `json:"directory,omitempty"`
InitializeFileSystem bool `json:"initialize_file_system"`
StorageAccountName string `json:"storage_account_name,omitempty"`
TenantId string `json:"tenant_id,omitempty"`
}
// ResourceMountAdl models the nested "adl" (Azure Data Lake) block of
// ResourceMount.
type ResourceMountAdl struct {
ClientId string `json:"client_id"`
ClientSecretKey string `json:"client_secret_key"`
ClientSecretScope string `json:"client_secret_scope"`
Directory string `json:"directory,omitempty"`
SparkConfPrefix string `json:"spark_conf_prefix,omitempty"`
StorageResourceName string `json:"storage_resource_name,omitempty"`
TenantId string `json:"tenant_id,omitempty"`
}
// ResourceMountGs models the nested "gs" (Google Cloud Storage) block of
// ResourceMount.
type ResourceMountGs struct {
BucketName string `json:"bucket_name"`
ServiceAccount string `json:"service_account,omitempty"`
}
// ResourceMountS3 models the nested "s3" block of ResourceMount.
type ResourceMountS3 struct {
BucketName string `json:"bucket_name"`
InstanceProfile string `json:"instance_profile,omitempty"`
}
// ResourceMountWasb models the nested "wasb" (Windows Azure Storage Blob)
// block of ResourceMount.
type ResourceMountWasb struct {
AuthType string `json:"auth_type"`
ContainerName string `json:"container_name,omitempty"`
Directory string `json:"directory,omitempty"`
StorageAccountName string `json:"storage_account_name,omitempty"`
TokenSecretKey string `json:"token_secret_key"`
TokenSecretScope string `json:"token_secret_scope"`
}
// ResourceMount maps the "databricks_mount" Terraform resource schema;
// one storage-backend block (abfs/adl/gs/s3/wasb) selects the mount type.
type ResourceMount struct {
ClusterId string `json:"cluster_id,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
ExtraConfigs map[string]string `json:"extra_configs,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name,omitempty"`
ResourceId string `json:"resource_id,omitempty"`
Source string `json:"source,omitempty"`
Uri string `json:"uri,omitempty"`
Abfs *ResourceMountAbfs `json:"abfs,omitempty"`
Adl *ResourceMountAdl `json:"adl,omitempty"`
Gs *ResourceMountGs `json:"gs,omitempty"`
S3 *ResourceMountS3 `json:"s3,omitempty"`
Wasb *ResourceMountWasb `json:"wasb,omitempty"`
}

View File

@ -0,0 +1,13 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsCredentials maps the "databricks_mws_credentials" Terraform
// resource schema.
type ResourceMwsCredentials struct {
AccountId string `json:"account_id"`
CreationTime int `json:"creation_time,omitempty"`
CredentialsId string `json:"credentials_id,omitempty"`
CredentialsName string `json:"credentials_name"`
ExternalId string `json:"external_id,omitempty"`
Id string `json:"id,omitempty"`
RoleArn string `json:"role_arn"`
}

View File

@ -0,0 +1,18 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsCustomerManagedKeysAwsKeyInfo models the nested
// "aws_key_info" block of ResourceMwsCustomerManagedKeys.
type ResourceMwsCustomerManagedKeysAwsKeyInfo struct {
KeyAlias string `json:"key_alias"`
KeyArn string `json:"key_arn"`
KeyRegion string `json:"key_region,omitempty"`
}
// ResourceMwsCustomerManagedKeys maps the
// "databricks_mws_customer_managed_keys" Terraform resource schema.
type ResourceMwsCustomerManagedKeys struct {
AccountId string `json:"account_id"`
CreationTime int `json:"creation_time,omitempty"`
CustomerManagedKeyId string `json:"customer_managed_key_id,omitempty"`
Id string `json:"id,omitempty"`
UseCases []string `json:"use_cases"`
AwsKeyInfo *ResourceMwsCustomerManagedKeysAwsKeyInfo `json:"aws_key_info,omitempty"`
}

View File

@ -0,0 +1,18 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsLogDelivery maps the "databricks_mws_log_delivery" Terraform
// resource schema.
type ResourceMwsLogDelivery struct {
AccountId string `json:"account_id"`
ConfigId string `json:"config_id,omitempty"`
ConfigName string `json:"config_name,omitempty"`
CredentialsId string `json:"credentials_id"`
DeliveryPathPrefix string `json:"delivery_path_prefix,omitempty"`
DeliveryStartTime string `json:"delivery_start_time,omitempty"`
Id string `json:"id,omitempty"`
LogType string `json:"log_type"`
OutputFormat string `json:"output_format"`
Status string `json:"status,omitempty"`
StorageConfigurationId string `json:"storage_configuration_id"`
WorkspaceIdsFilter []int `json:"workspace_ids_filter,omitempty"`
}

View File

@ -0,0 +1,28 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsNetworksErrorMessages models one element of the repeated
// "error_messages" block of ResourceMwsNetworks.
type ResourceMwsNetworksErrorMessages struct {
ErrorMessage string `json:"error_message,omitempty"`
ErrorType string `json:"error_type,omitempty"`
}
// ResourceMwsNetworksVpcEndpoints models the nested "vpc_endpoints" block
// of ResourceMwsNetworks.
type ResourceMwsNetworksVpcEndpoints struct {
DataplaneRelay []string `json:"dataplane_relay"`
RestApi []string `json:"rest_api"`
}
// ResourceMwsNetworks maps the "databricks_mws_networks" Terraform
// resource schema.
type ResourceMwsNetworks struct {
AccountId string `json:"account_id"`
CreationTime int `json:"creation_time,omitempty"`
Id string `json:"id,omitempty"`
NetworkId string `json:"network_id,omitempty"`
NetworkName string `json:"network_name"`
SecurityGroupIds []string `json:"security_group_ids"`
SubnetIds []string `json:"subnet_ids"`
VpcId string `json:"vpc_id"`
VpcStatus string `json:"vpc_status,omitempty"`
WorkspaceId int `json:"workspace_id,omitempty"`
ErrorMessages []ResourceMwsNetworksErrorMessages `json:"error_messages,omitempty"`
VpcEndpoints *ResourceMwsNetworksVpcEndpoints `json:"vpc_endpoints,omitempty"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsPermissionAssignment maps the
// "databricks_mws_permission_assignment" Terraform resource schema.
type ResourceMwsPermissionAssignment struct {
Id string `json:"id,omitempty"`
Permissions []string `json:"permissions"`
PrincipalId int `json:"principal_id"`
WorkspaceId int `json:"workspace_id"`
}

View File

@ -0,0 +1,15 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsPrivateAccessSettings maps the
// "databricks_mws_private_access_settings" Terraform resource schema.
type ResourceMwsPrivateAccessSettings struct {
AccountId string `json:"account_id,omitempty"`
AllowedVpcEndpointIds []string `json:"allowed_vpc_endpoint_ids,omitempty"`
Id string `json:"id,omitempty"`
PrivateAccessLevel string `json:"private_access_level,omitempty"`
PrivateAccessSettingsId string `json:"private_access_settings_id,omitempty"`
PrivateAccessSettingsName string `json:"private_access_settings_name"`
PublicAccessEnabled bool `json:"public_access_enabled,omitempty"`
Region string `json:"region"`
Status string `json:"status,omitempty"`
}

View File

@ -0,0 +1,12 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsStorageConfigurations maps the
// "databricks_mws_storage_configurations" Terraform resource schema.
type ResourceMwsStorageConfigurations struct {
AccountId string `json:"account_id"`
BucketName string `json:"bucket_name"`
CreationTime int `json:"creation_time,omitempty"`
Id string `json:"id,omitempty"`
StorageConfigurationId string `json:"storage_configuration_id,omitempty"`
StorageConfigurationName string `json:"storage_configuration_name"`
}

View File

@ -0,0 +1,16 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsVpcEndpoint maps the "databricks_mws_vpc_endpoint" Terraform
// resource schema.
type ResourceMwsVpcEndpoint struct {
AccountId string `json:"account_id,omitempty"`
AwsAccountId string `json:"aws_account_id,omitempty"`
AwsEndpointServiceId string `json:"aws_endpoint_service_id,omitempty"`
AwsVpcEndpointId string `json:"aws_vpc_endpoint_id"`
Id string `json:"id,omitempty"`
Region string `json:"region"`
State string `json:"state,omitempty"`
UseCase string `json:"use_case,omitempty"`
VpcEndpointId string `json:"vpc_endpoint_id,omitempty"`
VpcEndpointName string `json:"vpc_endpoint_name"`
}

View File

@ -0,0 +1,69 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceMwsWorkspacesCloudResourceBucketGcp models the nested "gcp"
// block of ResourceMwsWorkspacesCloudResourceBucket.
type ResourceMwsWorkspacesCloudResourceBucketGcp struct {
ProjectId string `json:"project_id"`
}
// ResourceMwsWorkspacesCloudResourceBucket models the nested
// "cloud_resource_bucket" block of ResourceMwsWorkspaces.
type ResourceMwsWorkspacesCloudResourceBucket struct {
Gcp *ResourceMwsWorkspacesCloudResourceBucketGcp `json:"gcp,omitempty"`
}
// ResourceMwsWorkspacesExternalCustomerInfo models the nested
// "external_customer_info" block of ResourceMwsWorkspaces.
type ResourceMwsWorkspacesExternalCustomerInfo struct {
AuthoritativeUserEmail string `json:"authoritative_user_email"`
AuthoritativeUserFullName string `json:"authoritative_user_full_name"`
CustomerName string `json:"customer_name"`
}
// ResourceMwsWorkspacesNetworkGcpCommonNetworkConfig models the nested
// "gcp_common_network_config" block of ResourceMwsWorkspacesNetwork.
type ResourceMwsWorkspacesNetworkGcpCommonNetworkConfig struct {
GkeClusterMasterIpRange string `json:"gke_cluster_master_ip_range"`
GkeConnectivityType string `json:"gke_connectivity_type"`
}
// ResourceMwsWorkspacesNetworkGcpManagedNetworkConfig models the nested
// "gcp_managed_network_config" block of ResourceMwsWorkspacesNetwork.
type ResourceMwsWorkspacesNetworkGcpManagedNetworkConfig struct {
GkeClusterPodIpRange string `json:"gke_cluster_pod_ip_range"`
GkeClusterServiceIpRange string `json:"gke_cluster_service_ip_range"`
SubnetCidr string `json:"subnet_cidr"`
}
// ResourceMwsWorkspacesNetwork models the nested "network" block of
// ResourceMwsWorkspaces.
type ResourceMwsWorkspacesNetwork struct {
NetworkId string `json:"network_id,omitempty"`
GcpCommonNetworkConfig *ResourceMwsWorkspacesNetworkGcpCommonNetworkConfig `json:"gcp_common_network_config,omitempty"`
GcpManagedNetworkConfig *ResourceMwsWorkspacesNetworkGcpManagedNetworkConfig `json:"gcp_managed_network_config,omitempty"`
}
// ResourceMwsWorkspacesToken models the nested "token" block of
// ResourceMwsWorkspaces.
type ResourceMwsWorkspacesToken struct {
Comment string `json:"comment,omitempty"`
LifetimeSeconds int `json:"lifetime_seconds,omitempty"`
TokenId string `json:"token_id,omitempty"`
TokenValue string `json:"token_value,omitempty"`
}
// ResourceMwsWorkspaces maps the "databricks_mws_workspaces" Terraform
// resource schema.
type ResourceMwsWorkspaces struct {
AccountId string `json:"account_id"`
AwsRegion string `json:"aws_region,omitempty"`
Cloud string `json:"cloud,omitempty"`
CreationTime int `json:"creation_time,omitempty"`
CredentialsId string `json:"credentials_id,omitempty"`
CustomerManagedKeyId string `json:"customer_managed_key_id,omitempty"`
DeploymentName string `json:"deployment_name,omitempty"`
Id string `json:"id,omitempty"`
IsNoPublicIpEnabled bool `json:"is_no_public_ip_enabled,omitempty"`
Location string `json:"location,omitempty"`
ManagedServicesCustomerManagedKeyId string `json:"managed_services_customer_managed_key_id,omitempty"`
NetworkId string `json:"network_id,omitempty"`
PricingTier string `json:"pricing_tier,omitempty"`
PrivateAccessSettingsId string `json:"private_access_settings_id,omitempty"`
StorageConfigurationId string `json:"storage_configuration_id,omitempty"`
StorageCustomerManagedKeyId string `json:"storage_customer_managed_key_id,omitempty"`
WorkspaceId int `json:"workspace_id,omitempty"`
WorkspaceName string `json:"workspace_name"`
WorkspaceStatus string `json:"workspace_status,omitempty"`
WorkspaceStatusMessage string `json:"workspace_status_message,omitempty"`
WorkspaceUrl string `json:"workspace_url,omitempty"`
CloudResourceBucket *ResourceMwsWorkspacesCloudResourceBucket `json:"cloud_resource_bucket,omitempty"`
ExternalCustomerInfo *ResourceMwsWorkspacesExternalCustomerInfo `json:"external_customer_info,omitempty"`
Network *ResourceMwsWorkspacesNetwork `json:"network,omitempty"`
Token *ResourceMwsWorkspacesToken `json:"token,omitempty"`
}

View File

@ -0,0 +1,16 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceNotebook maps the "databricks_notebook" Terraform resource
// schema.
type ResourceNotebook struct {
ContentBase64 string `json:"content_base64,omitempty"`
Format string `json:"format,omitempty"`
Id string `json:"id,omitempty"`
Language string `json:"language,omitempty"`
Md5 string `json:"md5,omitempty"`
ObjectId int `json:"object_id,omitempty"`
ObjectType string `json:"object_type,omitempty"`
Path string `json:"path"`
Source string `json:"source,omitempty"`
Url string `json:"url,omitempty"`
}

View File

@ -0,0 +1,11 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceOboToken maps the "databricks_obo_token" Terraform resource
// schema.
type ResourceOboToken struct {
ApplicationId string `json:"application_id"`
Comment string `json:"comment"`
Id string `json:"id,omitempty"`
LifetimeSeconds int `json:"lifetime_seconds"`
TokenValue string `json:"token_value,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourcePermissionAssignment maps the
// "databricks_permission_assignment" Terraform resource schema.
type ResourcePermissionAssignment struct {
Id string `json:"id,omitempty"`
Permissions []string `json:"permissions"`
PrincipalId int `json:"principal_id"`
}

View File

@ -0,0 +1,34 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type ResourcePermissionsAccessControl struct {
GroupName string `json:"group_name,omitempty"`
PermissionLevel string `json:"permission_level"`
ServicePrincipalName string `json:"service_principal_name,omitempty"`
UserName string `json:"user_name,omitempty"`
}
type ResourcePermissions struct {
Authorization string `json:"authorization,omitempty"`
ClusterId string `json:"cluster_id,omitempty"`
ClusterPolicyId string `json:"cluster_policy_id,omitempty"`
DirectoryId string `json:"directory_id,omitempty"`
DirectoryPath string `json:"directory_path,omitempty"`
ExperimentId string `json:"experiment_id,omitempty"`
Id string `json:"id,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
JobId string `json:"job_id,omitempty"`
NotebookId string `json:"notebook_id,omitempty"`
NotebookPath string `json:"notebook_path,omitempty"`
ObjectType string `json:"object_type,omitempty"`
PipelineId string `json:"pipeline_id,omitempty"`
RegisteredModelId string `json:"registered_model_id,omitempty"`
RepoId string `json:"repo_id,omitempty"`
RepoPath string `json:"repo_path,omitempty"`
SqlAlertId string `json:"sql_alert_id,omitempty"`
SqlDashboardId string `json:"sql_dashboard_id,omitempty"`
SqlEndpointId string `json:"sql_endpoint_id,omitempty"`
SqlQueryId string `json:"sql_query_id,omitempty"`
AccessControl []ResourcePermissionsAccessControl `json:"access_control,omitempty"`
}

View File

@ -0,0 +1,127 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourcePipelineClusterAutoscale models the nested "autoscale" block of
// ResourcePipelineCluster.
type ResourcePipelineClusterAutoscale struct {
MaxWorkers int `json:"max_workers,omitempty"`
MinWorkers int `json:"min_workers,omitempty"`
Mode string `json:"mode,omitempty"`
}
// ResourcePipelineClusterAwsAttributes models the nested "aws_attributes"
// block of ResourcePipelineCluster.
type ResourcePipelineClusterAwsAttributes struct {
FirstOnDemand int `json:"first_on_demand,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
// ResourcePipelineClusterClusterLogConfDbfs models the nested "dbfs"
// block of ResourcePipelineClusterClusterLogConf.
type ResourcePipelineClusterClusterLogConfDbfs struct {
Destination string `json:"destination"`
}
// ResourcePipelineClusterClusterLogConfS3 models the nested "s3" block of
// ResourcePipelineClusterClusterLogConf.
type ResourcePipelineClusterClusterLogConfS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourcePipelineClusterClusterLogConf models the nested
// "cluster_log_conf" block of ResourcePipelineCluster.
type ResourcePipelineClusterClusterLogConf struct {
Dbfs *ResourcePipelineClusterClusterLogConfDbfs `json:"dbfs,omitempty"`
S3 *ResourcePipelineClusterClusterLogConfS3 `json:"s3,omitempty"`
}
// ResourcePipelineClusterGcpAttributes models the nested "gcp_attributes"
// block of ResourcePipelineCluster.
type ResourcePipelineClusterGcpAttributes struct {
GoogleServiceAccount string `json:"google_service_account,omitempty"`
}
// ResourcePipelineClusterInitScriptsDbfs models the nested "dbfs" block
// of ResourcePipelineClusterInitScripts.
type ResourcePipelineClusterInitScriptsDbfs struct {
Destination string `json:"destination"`
}
// ResourcePipelineClusterInitScriptsFile models the nested "file" block
// of ResourcePipelineClusterInitScripts.
type ResourcePipelineClusterInitScriptsFile struct {
Destination string `json:"destination,omitempty"`
}
// ResourcePipelineClusterInitScriptsGcs models the nested "gcs" block of
// ResourcePipelineClusterInitScripts.
type ResourcePipelineClusterInitScriptsGcs struct {
Destination string `json:"destination,omitempty"`
}
// ResourcePipelineClusterInitScriptsS3 models the nested "s3" block of
// ResourcePipelineClusterInitScripts.
type ResourcePipelineClusterInitScriptsS3 struct {
CannedAcl string `json:"canned_acl,omitempty"`
Destination string `json:"destination"`
EnableEncryption bool `json:"enable_encryption,omitempty"`
EncryptionType string `json:"encryption_type,omitempty"`
Endpoint string `json:"endpoint,omitempty"`
KmsKey string `json:"kms_key,omitempty"`
Region string `json:"region,omitempty"`
}
// ResourcePipelineClusterInitScripts models one element of the repeated
// "init_scripts" block of ResourcePipelineCluster.
type ResourcePipelineClusterInitScripts struct {
Dbfs *ResourcePipelineClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourcePipelineClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourcePipelineClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourcePipelineClusterInitScriptsS3 `json:"s3,omitempty"`
}
// ResourcePipelineCluster models one element of the repeated "cluster"
// block of ResourcePipeline.
type ResourcePipelineCluster struct {
ApplyPolicyDefaultValues bool `json:"apply_policy_default_values,omitempty"`
CustomTags map[string]string `json:"custom_tags,omitempty"`
DriverInstancePoolId string `json:"driver_instance_pool_id,omitempty"`
DriverNodeTypeId string `json:"driver_node_type_id,omitempty"`
InstancePoolId string `json:"instance_pool_id,omitempty"`
Label string `json:"label,omitempty"`
NodeTypeId string `json:"node_type_id,omitempty"`
NumWorkers int `json:"num_workers,omitempty"`
PolicyId string `json:"policy_id,omitempty"`
SparkConf map[string]string `json:"spark_conf,omitempty"`
SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
Autoscale *ResourcePipelineClusterAutoscale `json:"autoscale,omitempty"`
AwsAttributes *ResourcePipelineClusterAwsAttributes `json:"aws_attributes,omitempty"`
ClusterLogConf *ResourcePipelineClusterClusterLogConf `json:"cluster_log_conf,omitempty"`
GcpAttributes *ResourcePipelineClusterGcpAttributes `json:"gcp_attributes,omitempty"`
InitScripts []ResourcePipelineClusterInitScripts `json:"init_scripts,omitempty"`
}
// ResourcePipelineFilters models the nested "filters" block of
// ResourcePipeline.
type ResourcePipelineFilters struct {
Exclude []string `json:"exclude,omitempty"`
Include []string `json:"include,omitempty"`
}
// ResourcePipelineLibraryMaven models the nested "maven" block of
// ResourcePipelineLibrary.
type ResourcePipelineLibraryMaven struct {
Coordinates string `json:"coordinates"`
Exclusions []string `json:"exclusions,omitempty"`
Repo string `json:"repo,omitempty"`
}
// ResourcePipelineLibraryNotebook models the nested "notebook" block of
// ResourcePipelineLibrary.
type ResourcePipelineLibraryNotebook struct {
Path string `json:"path"`
}
// ResourcePipelineLibrary models one element of the repeated "library"
// block of ResourcePipeline.
type ResourcePipelineLibrary struct {
Jar string `json:"jar,omitempty"`
Whl string `json:"whl,omitempty"`
Maven *ResourcePipelineLibraryMaven `json:"maven,omitempty"`
Notebook *ResourcePipelineLibraryNotebook `json:"notebook,omitempty"`
}
// ResourcePipeline maps the "databricks_pipeline" Terraform resource
// schema (Delta Live Tables pipelines — TODO confirm naming against
// provider docs).
type ResourcePipeline struct {
AllowDuplicateNames bool `json:"allow_duplicate_names,omitempty"`
Channel string `json:"channel,omitempty"`
Configuration map[string]string `json:"configuration,omitempty"`
Continuous bool `json:"continuous,omitempty"`
Development bool `json:"development,omitempty"`
Edition string `json:"edition,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name,omitempty"`
Photon bool `json:"photon,omitempty"`
Storage string `json:"storage,omitempty"`
Target string `json:"target,omitempty"`
Url string `json:"url,omitempty"`
Cluster []ResourcePipelineCluster `json:"cluster,omitempty"`
Filters *ResourcePipelineFilters `json:"filters,omitempty"`
Library []ResourcePipelineLibrary `json:"library,omitempty"`
}

View File

@ -0,0 +1,28 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceRecipientIpAccessList models the nested "ip_access_list" block
// of ResourceRecipient.
type ResourceRecipientIpAccessList struct {
AllowedIpAddresses []string `json:"allowed_ip_addresses"`
}
// ResourceRecipientTokens models one element of the repeated "tokens"
// block of ResourceRecipient.
type ResourceRecipientTokens struct {
ActivationUrl string `json:"activation_url,omitempty"`
CreatedAt int `json:"created_at,omitempty"`
CreatedBy string `json:"created_by,omitempty"`
ExpirationTime int `json:"expiration_time,omitempty"`
Id string `json:"id,omitempty"`
UpdatedAt int `json:"updated_at,omitempty"`
UpdatedBy string `json:"updated_by,omitempty"`
}
// ResourceRecipient maps the "databricks_recipient" Terraform resource
// schema.
type ResourceRecipient struct {
AuthenticationType string `json:"authentication_type"`
Comment string `json:"comment,omitempty"`
DataRecipientGlobalMetastoreId string `json:"data_recipient_global_metastore_id,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name"`
SharingCode string `json:"sharing_code,omitempty"`
IpAccessList *ResourceRecipientIpAccessList `json:"ip_access_list,omitempty"`
Tokens []ResourceRecipientTokens `json:"tokens,omitempty"`
}

View File

@ -0,0 +1,13 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceRepo maps the "databricks_repo" Terraform resource schema.
type ResourceRepo struct {
Branch string `json:"branch,omitempty"`
CommitHash string `json:"commit_hash,omitempty"`
GitProvider string `json:"git_provider,omitempty"`
Id string `json:"id,omitempty"`
Path string `json:"path,omitempty"`
Tag string `json:"tag,omitempty"`
Url string `json:"url"`
}

View File

@ -0,0 +1,14 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceSchema maps the "databricks_schema" Terraform resource schema.
type ResourceSchema struct {
CatalogName string `json:"catalog_name"`
Comment string `json:"comment,omitempty"`
ForceDestroy bool `json:"force_destroy,omitempty"`
Id string `json:"id,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
Name string `json:"name"`
Owner string `json:"owner,omitempty"`
Properties map[string]string `json:"properties,omitempty"`
}

View File

@ -0,0 +1,11 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceSecret maps the "databricks_secret" Terraform resource schema.
type ResourceSecret struct {
Id string `json:"id,omitempty"`
Key string `json:"key"`
LastUpdatedTimestamp int `json:"last_updated_timestamp,omitempty"`
Scope string `json:"scope"`
StringValue string `json:"string_value"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceSecretAcl maps the "databricks_secret_acl" Terraform resource
// schema.
type ResourceSecretAcl struct {
Id string `json:"id,omitempty"`
Permission string `json:"permission"`
Principal string `json:"principal"`
Scope string `json:"scope"`
}

View File

@ -0,0 +1,16 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceSecretScopeKeyvaultMetadata models the nested
// "keyvault_metadata" block of ResourceSecretScope.
type ResourceSecretScopeKeyvaultMetadata struct {
DnsName string `json:"dns_name"`
ResourceId string `json:"resource_id"`
}
// ResourceSecretScope maps the "databricks_secret_scope" Terraform
// resource schema.
type ResourceSecretScope struct {
BackendType string `json:"backend_type,omitempty"`
Id string `json:"id,omitempty"`
InitialManagePrincipal string `json:"initial_manage_principal,omitempty"`
Name string `json:"name"`
KeyvaultMetadata *ResourceSecretScopeKeyvaultMetadata `json:"keyvault_metadata,omitempty"`
}

View File

@ -0,0 +1,16 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceServicePrincipal maps the "databricks_service_principal"
// Terraform resource schema.
type ResourceServicePrincipal struct {
Active bool `json:"active,omitempty"`
AllowClusterCreate bool `json:"allow_cluster_create,omitempty"`
AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
ApplicationId string `json:"application_id,omitempty"`
DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"`
DisplayName string `json:"display_name,omitempty"`
ExternalId string `json:"external_id,omitempty"`
Force bool `json:"force,omitempty"`
Id string `json:"id,omitempty"`
WorkspaceAccess bool `json:"workspace_access,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceServicePrincipalRole maps the
// "databricks_service_principal_role" Terraform resource schema.
type ResourceServicePrincipalRole struct {
Id string `json:"id,omitempty"`
Role string `json:"role"`
ServicePrincipalId string `json:"service_principal_id"`
}

View File

@ -0,0 +1,10 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceServicePrincipalSecret maps the
// "databricks_service_principal_secret" Terraform resource schema.
type ResourceServicePrincipalSecret struct {
Id string `json:"id,omitempty"`
Secret string `json:"secret,omitempty"`
ServicePrincipalId string `json:"service_principal_id"`
Status string `json:"status,omitempty"`
}

View File

@ -0,0 +1,20 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceShareObject models one element of the repeated "object" block
// of ResourceShare.
type ResourceShareObject struct {
AddedAt int `json:"added_at,omitempty"`
AddedBy string `json:"added_by,omitempty"`
Comment string `json:"comment,omitempty"`
DataObjectType string `json:"data_object_type"`
Name string `json:"name"`
SharedAs string `json:"shared_as,omitempty"`
}
// ResourceShare maps the "databricks_share" Terraform resource schema.
type ResourceShare struct {
CreatedAt int `json:"created_at,omitempty"`
CreatedBy string `json:"created_by,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name"`
Object []ResourceShareObject `json:"object,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
// ResourceSqlDashboard maps the "databricks_sql_dashboard" Terraform
// resource schema.
type ResourceSqlDashboard struct {
Id string `json:"id,omitempty"`
Name string `json:"name"`
Tags []string `json:"tags,omitempty"`
}

Some files were not shown because too many files have changed in this diff Show More