mirror of https://github.com/databricks/cli.git
Update Terraform provider schema structs from 1.23.0 (#713)
## Changes

The provider at version 1.24.0 includes a regression for the MLflow model resource. To fix this, we explicitly pin the provider to the version we generate bindings for.

## Tests

Confirmed that deploying said MLflow model resource works with 1.23.0.
Parent: ca2f1dc06c
Commit: aa9e1fc41c
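The gist of the change, as a minimal Go sketch (illustrative only; `requiredProviders` is a hypothetical helper, while `ProviderVersion` mirrors the constant added in this commit): a single pinned version constant feeds the generated `required_providers` block, replacing the floating `>= 1.0.0` constraint.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ProviderVersion mirrors the pinned Databricks provider version that the
// bindings are generated against (this commit pins 1.23.0).
const ProviderVersion = "1.23.0"

// requiredProviders is a hypothetical helper showing how an exact version pin
// ends up in the terraform.required_providers block instead of ">= 1.0.0".
func requiredProviders() map[string]any {
	return map[string]any{
		"required_providers": map[string]any{
			"databricks": map[string]any{
				"source":  "databricks/databricks",
				"version": ProviderVersion,
			},
		},
	}
}

func main() {
	// Render the block as JSON, roughly what the generated root module contains.
	out, err := json.MarshalIndent(map[string]any{"terraform": requiredProviders()}, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```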
@@ -8,6 +8,7 @@ import (
 	"strings"
 	"text/template"
 
+	schemapkg "github.com/databricks/cli/bundle/internal/tf/codegen/schema"
 	tfjson "github.com/hashicorp/terraform-json"
 )
 
@@ -32,6 +33,23 @@ func (c *collection) Generate(path string) error {
 	return tmpl.Execute(f, c)
 }
 
+type root struct {
+	OutputFile      string
+	ProviderVersion string
+}
+
+func (r *root) Generate(path string) error {
+	tmpl := template.Must(template.ParseFiles(fmt.Sprintf("./templates/%s.tmpl", r.OutputFile)))
+	f, err := os.Create(filepath.Join(path, r.OutputFile))
+	if err != nil {
+		return err
+	}
+
+	defer f.Close()
+
+	return tmpl.Execute(f, r)
+}
+
 func Run(ctx context.Context, schema *tfjson.ProviderSchema, path string) error {
 	// Generate types for resources.
 	var resources []*namedBlock
@@ -105,5 +123,17 @@ func Run(ctx context.Context, schema *tfjson.ProviderSchema, path string) error
 		}
 	}
 
+	// Generate root.go
+	{
+		r := &root{
+			OutputFile:      "root.go",
+			ProviderVersion: schemapkg.ProviderVersion,
+		}
+		err := r.Generate(path)
+		if err != nil {
+			return err
+		}
+	}
+
 	return nil
 }
@@ -8,6 +8,7 @@ import (
 	"os"
 	"path/filepath"
 
+	"github.com/hashicorp/go-version"
 	"github.com/hashicorp/hc-install/product"
 	"github.com/hashicorp/hc-install/releases"
 	"github.com/hashicorp/terraform-exec/tfexec"
@@ -19,7 +20,7 @@ func (s *Schema) writeTerraformBlock(_ context.Context) error {
 		"required_providers": map[string]interface{}{
 			"databricks": map[string]interface{}{
 				"source":  "databricks/databricks",
-				"version": ">= 1.0.0",
+				"version": ProviderVersion,
 			},
 		},
 	},
@@ -40,9 +41,10 @@ func (s *Schema) installTerraform(ctx context.Context) (path string, err error)
 		return
 	}
 
-	installer := &releases.LatestVersion{
-		InstallDir: installDir,
+	installer := &releases.ExactVersion{
 		Product:    product.Terraform,
+		Version:    version.Must(version.NewVersion("1.5.5")),
+		InstallDir: installDir,
 	}
 
 	installer.SetLogger(log.Default())
@@ -0,0 +1,3 @@
+package schema
+
+const ProviderVersion = "1.23.0"
@@ -0,0 +1,32 @@
+package schema
+
+type Providers struct {
+	Databricks *Config `json:"databricks,omitempty"`
+}
+
+func NewProviders() *Providers {
+	return &Providers{
+		Databricks: &Config{},
+	}
+}
+
+type Root struct {
+	Terraform map[string]any `json:"terraform"`
+
+	Provider *Providers   `json:"provider,omitempty"`
+	Data     *DataSources `json:"data,omitempty"`
+	Resource *Resources   `json:"resource,omitempty"`
+}
+
+func NewRoot() *Root {
+	return &Root{
+		Terraform: map[string]interface{}{
+			"required_providers": map[string]interface{}{
+				"databricks": map[string]interface{}{
+					"source":  "databricks/databricks",
+					"version": "1.23.0",
+				},
+			},
+		},
+	}
+}
@@ -90,6 +90,7 @@ type DataSourceClusterClusterInfoGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	BootDiskSize int `json:"boot_disk_size,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
@@ -26,6 +26,7 @@ type DataSourceInstancePoolPoolInfoDiskSpec struct {
 
 type DataSourceInstancePoolPoolInfoGcpAttributes struct {
 	GcpAvailability string `json:"gcp_availability,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 }
 
 type DataSourceInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOption struct {
@@ -124,6 +124,7 @@ type DataSourceJobJobSettingsSettingsJobClusterNewClusterGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	BootDiskSize int `json:"boot_disk_size,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
@@ -305,6 +306,7 @@ type DataSourceJobJobSettingsSettingsNewClusterGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	BootDiskSize int `json:"boot_disk_size,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
@@ -401,6 +403,11 @@ type DataSourceJobJobSettingsSettingsNotificationSettings struct {
 	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
 }
 
+type DataSourceJobJobSettingsSettingsParameter struct {
+	Default string `json:"default,omitempty"`
+	Name string `json:"name,omitempty"`
+}
+
 type DataSourceJobJobSettingsSettingsPipelineTask struct {
 	FullRefresh bool `json:"full_refresh,omitempty"`
 	PipelineId string `json:"pipeline_id"`
@@ -421,6 +428,11 @@ type DataSourceJobJobSettingsSettingsRunAs struct {
 	UserName string `json:"user_name,omitempty"`
 }
 
+type DataSourceJobJobSettingsSettingsRunJobTask struct {
+	JobId string `json:"job_id"`
+	JobParameters map[string]string `json:"job_parameters,omitempty"`
+}
+
 type DataSourceJobJobSettingsSettingsSchedule struct {
 	PauseStatus string `json:"pause_status,omitempty"`
 	QuartzCronExpression string `json:"quartz_cron_expression"`
@@ -573,6 +585,7 @@ type DataSourceJobJobSettingsSettingsTaskNewClusterGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	BootDiskSize int `json:"boot_disk_size,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
@@ -682,6 +695,11 @@ type DataSourceJobJobSettingsSettingsTaskPythonWheelTask struct {
 	Parameters []string `json:"parameters,omitempty"`
 }
 
+type DataSourceJobJobSettingsSettingsTaskRunJobTask struct {
+	JobId string `json:"job_id"`
+	JobParameters map[string]string `json:"job_parameters,omitempty"`
+}
+
 type DataSourceJobJobSettingsSettingsTaskSparkJarTask struct {
 	JarUri string `json:"jar_uri,omitempty"`
 	MainClassName string `json:"main_class_name,omitempty"`
@@ -760,6 +778,7 @@ type DataSourceJobJobSettingsSettingsTask struct {
 	NotificationSettings *DataSourceJobJobSettingsSettingsTaskNotificationSettings `json:"notification_settings,omitempty"`
 	PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"`
 	PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
+	RunJobTask *DataSourceJobJobSettingsSettingsTaskRunJobTask `json:"run_job_task,omitempty"`
 	SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"`
 	SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"`
 	SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
@@ -821,10 +840,12 @@ type DataSourceJobJobSettingsSettings struct {
 	NewCluster *DataSourceJobJobSettingsSettingsNewCluster `json:"new_cluster,omitempty"`
 	NotebookTask *DataSourceJobJobSettingsSettingsNotebookTask `json:"notebook_task,omitempty"`
 	NotificationSettings *DataSourceJobJobSettingsSettingsNotificationSettings `json:"notification_settings,omitempty"`
+	Parameter []DataSourceJobJobSettingsSettingsParameter `json:"parameter,omitempty"`
 	PipelineTask *DataSourceJobJobSettingsSettingsPipelineTask `json:"pipeline_task,omitempty"`
 	PythonWheelTask *DataSourceJobJobSettingsSettingsPythonWheelTask `json:"python_wheel_task,omitempty"`
 	Queue *DataSourceJobJobSettingsSettingsQueue `json:"queue,omitempty"`
 	RunAs *DataSourceJobJobSettingsSettingsRunAs `json:"run_as,omitempty"`
+	RunJobTask *DataSourceJobJobSettingsSettingsRunJobTask `json:"run_job_task,omitempty"`
 	Schedule *DataSourceJobJobSettingsSettingsSchedule `json:"schedule,omitempty"`
 	SparkJarTask *DataSourceJobJobSettingsSettingsSparkJarTask `json:"spark_jar_task,omitempty"`
 	SparkPythonTask *DataSourceJobJobSettingsSettingsSparkPythonTask `json:"spark_python_task,omitempty"`
@@ -68,6 +68,7 @@ type ResourceClusterGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	BootDiskSize int `json:"boot_disk_size,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
@@ -0,0 +1,15 @@
+// Generated from Databricks Terraform provider schema. DO NOT EDIT.
+
+package schema
+
+type ResourceConnection struct {
+	Comment string `json:"comment,omitempty"`
+	ConnectionType string `json:"connection_type"`
+	Id string `json:"id,omitempty"`
+	MetastoreId string `json:"metastore_id,omitempty"`
+	Name string `json:"name"`
+	Options map[string]string `json:"options"`
+	Owner string `json:"owner,omitempty"`
+	Properties map[string]string `json:"properties,omitempty"`
+	ReadOnly bool `json:"read_only,omitempty"`
+}
@@ -26,6 +26,7 @@ type ResourceInstancePoolDiskSpec struct {
 
 type ResourceInstancePoolGcpAttributes struct {
 	GcpAvailability string `json:"gcp_availability,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 }
 
 type ResourceInstancePoolInstancePoolFleetAttributesFleetOnDemandOption struct {
@@ -124,6 +124,7 @@ type ResourceJobJobClusterNewClusterGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	BootDiskSize int `json:"boot_disk_size,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
@@ -305,6 +306,7 @@ type ResourceJobNewClusterGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	BootDiskSize int `json:"boot_disk_size,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
@@ -401,6 +403,11 @@ type ResourceJobNotificationSettings struct {
 	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
 }
 
+type ResourceJobParameter struct {
+	Default string `json:"default,omitempty"`
+	Name string `json:"name,omitempty"`
+}
+
 type ResourceJobPipelineTask struct {
 	FullRefresh bool `json:"full_refresh,omitempty"`
 	PipelineId string `json:"pipeline_id"`
@@ -421,6 +428,11 @@ type ResourceJobRunAs struct {
 	UserName string `json:"user_name,omitempty"`
 }
 
+type ResourceJobRunJobTask struct {
+	JobId string `json:"job_id"`
+	JobParameters map[string]string `json:"job_parameters,omitempty"`
+}
+
 type ResourceJobSchedule struct {
 	PauseStatus string `json:"pause_status,omitempty"`
 	QuartzCronExpression string `json:"quartz_cron_expression"`
@@ -573,6 +585,7 @@ type ResourceJobTaskNewClusterGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	BootDiskSize int `json:"boot_disk_size,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
@@ -682,6 +695,11 @@ type ResourceJobTaskPythonWheelTask struct {
 	Parameters []string `json:"parameters,omitempty"`
 }
 
+type ResourceJobTaskRunJobTask struct {
+	JobId string `json:"job_id"`
+	JobParameters map[string]string `json:"job_parameters,omitempty"`
+}
+
 type ResourceJobTaskSparkJarTask struct {
 	JarUri string `json:"jar_uri,omitempty"`
 	MainClassName string `json:"main_class_name,omitempty"`
@@ -760,6 +778,7 @@ type ResourceJobTask struct {
 	NotificationSettings *ResourceJobTaskNotificationSettings `json:"notification_settings,omitempty"`
 	PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"`
 	PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
+	RunJobTask *ResourceJobTaskRunJobTask `json:"run_job_task,omitempty"`
 	SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"`
 	SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"`
 	SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
@@ -825,10 +844,12 @@ type ResourceJob struct {
 	NewCluster *ResourceJobNewCluster `json:"new_cluster,omitempty"`
 	NotebookTask *ResourceJobNotebookTask `json:"notebook_task,omitempty"`
 	NotificationSettings *ResourceJobNotificationSettings `json:"notification_settings,omitempty"`
+	Parameter []ResourceJobParameter `json:"parameter,omitempty"`
 	PipelineTask *ResourceJobPipelineTask `json:"pipeline_task,omitempty"`
 	PythonWheelTask *ResourceJobPythonWheelTask `json:"python_wheel_task,omitempty"`
 	Queue *ResourceJobQueue `json:"queue,omitempty"`
 	RunAs *ResourceJobRunAs `json:"run_as,omitempty"`
+	RunJobTask *ResourceJobRunJobTask `json:"run_job_task,omitempty"`
 	Schedule *ResourceJobSchedule `json:"schedule,omitempty"`
 	SparkJarTask *ResourceJobSparkJarTask `json:"spark_jar_task,omitempty"`
 	SparkPythonTask *ResourceJobSparkPythonTask `json:"spark_python_task,omitempty"`
@@ -4,6 +4,7 @@ package schema
 
 type ResourceModelServingConfigServedModels struct {
 	EnvironmentVars map[string]string `json:"environment_vars,omitempty"`
+	InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
 	ModelName string `json:"model_name"`
 	ModelVersion string `json:"model_version"`
 	Name string `json:"name,omitempty"`
@@ -47,6 +47,7 @@ type ResourcePipelineClusterClusterLogConf struct {
 type ResourcePipelineClusterGcpAttributes struct {
 	Availability string `json:"availability,omitempty"`
 	GoogleServiceAccount string `json:"google_service_account,omitempty"`
+	LocalSsdCount int `json:"local_ssd_count,omitempty"`
 	ZoneId string `json:"zone_id,omitempty"`
 }
 
@@ -24,7 +24,7 @@ func NewRoot() *Root {
 		"required_providers": map[string]interface{}{
 			"databricks": map[string]interface{}{
 				"source":  "databricks/databricks",
-				"version": ">= 1.0.0",
+				"version": "1.23.0",
 			},
 		},
 	},