Update Terraform provider schema structs from 1.23.0 (#713)

## Changes

Version 1.24.0 of the Terraform provider includes a regression that affects the MLflow model resource.

To fix this, we explicitly pin the provider version to the version we generate bindings for (currently 1.23.0).
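
Concretely, the constraint in the generated Terraform root moves from a floating lower bound to an exact pin. Rendered as Terraform JSON, the effective change is roughly:

```diff
 "required_providers": {
   "databricks": {
     "source": "databricks/databricks",
-    "version": ">= 1.0.0"
+    "version": "1.23.0"
   }
 }
```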

## Tests

Confirmed that deploying an MLflow model resource works with provider version 1.23.0.
Pieter Noordhuis, 2023-08-30 15:58:28 +02:00 (committed via GitHub)
commit aa9e1fc41c · parent ca2f1dc06c
14 changed files with 134 additions and 4 deletions

View File

@@ -8,6 +8,7 @@ import (
"strings"
"text/template"
schemapkg "github.com/databricks/cli/bundle/internal/tf/codegen/schema"
tfjson "github.com/hashicorp/terraform-json"
)
@@ -32,6 +33,23 @@ func (c *collection) Generate(path string) error {
return tmpl.Execute(f, c)
}
type root struct {
OutputFile string
ProviderVersion string
}
func (r *root) Generate(path string) error {
tmpl := template.Must(template.ParseFiles(fmt.Sprintf("./templates/%s.tmpl", r.OutputFile)))
f, err := os.Create(filepath.Join(path, r.OutputFile))
if err != nil {
return err
}
defer f.Close()
return tmpl.Execute(f, r)
}
func Run(ctx context.Context, schema *tfjson.ProviderSchema, path string) error {
// Generate types for resources.
var resources []*namedBlock
@@ -105,5 +123,17 @@ func Run(ctx context.Context, schema *tfjson.ProviderSchema, path string) error
}
}
// Generate root.go
{
r := &root{
OutputFile: "root.go",
ProviderVersion: schemapkg.ProviderVersion,
}
err := r.Generate(path)
if err != nil {
return err
}
}
return nil
}
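
For illustration, a minimal, runnable sketch of the mechanism root.Generate relies on: a text/template that references {{ .ProviderVersion }} is executed with a root value, so the pinned version string is written verbatim into the generated file. The template text below is made up for the example; the real ./templates/root.go.tmpl is not part of this diff.

```go
package main

import (
	"os"
	"text/template"
)

// root mirrors the generator's template data: the output file name and the
// provider version to bake into the generated source.
type root struct {
	OutputFile      string
	ProviderVersion string
}

// Illustrative template text only; the actual templates/root.go.tmpl may differ.
const illustrativeTemplate = `package schema

const ProviderVersion = "{{ .ProviderVersion }}"
`

func main() {
	tmpl := template.Must(template.New("root.go").Parse(illustrativeTemplate))
	r := &root{OutputFile: "root.go", ProviderVersion: "1.23.0"}
	// Render to stdout instead of creating a file, as Generate does.
	if err := tmpl.Execute(os.Stdout, r); err != nil {
		panic(err)
	}
}
```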

View File

@@ -8,6 +8,7 @@ import (
"os"
"path/filepath"
"github.com/hashicorp/go-version"
"github.com/hashicorp/hc-install/product"
"github.com/hashicorp/hc-install/releases"
"github.com/hashicorp/terraform-exec/tfexec"
@@ -19,7 +20,7 @@ func (s *Schema) writeTerraformBlock(_ context.Context) error {
"required_providers": map[string]interface{}{
"databricks": map[string]interface{}{
"source": "databricks/databricks",
"version": ">= 1.0.0",
"version": ProviderVersion,
},
},
},
@@ -40,9 +41,10 @@ func (s *Schema) installTerraform(ctx context.Context) (path string, err error)
return
}
-installer := &releases.LatestVersion{
-InstallDir: installDir,
+installer := &releases.ExactVersion{
Product: product.Terraform,
+Version: version.Must(version.NewVersion("1.5.5")),
+InstallDir: installDir,
}
installer.SetLogger(log.Default())
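
For reference, a minimal, self-contained sketch of how a pinned releases.ExactVersion installer is typically driven end to end (the function name and install directory below are illustrative, not the CLI's actual wiring):

```go
package main

import (
	"context"
	"log"

	"github.com/hashicorp/go-version"
	"github.com/hashicorp/hc-install/product"
	"github.com/hashicorp/hc-install/releases"
)

// installPinnedTerraform downloads an exact Terraform release into installDir
// and returns the path to the terraform binary, mirroring the switch from
// releases.LatestVersion to releases.ExactVersion above.
func installPinnedTerraform(ctx context.Context, installDir string) (string, error) {
	installer := &releases.ExactVersion{
		Product:    product.Terraform,
		Version:    version.Must(version.NewVersion("1.5.5")),
		InstallDir: installDir,
	}
	installer.SetLogger(log.Default())
	return installer.Install(ctx)
}

func main() {
	// Example install directory; any writable path works.
	path, err := installPinnedTerraform(context.Background(), "/tmp/terraform-bin")
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("terraform installed at %s", path)
}
```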

View File

@@ -0,0 +1,3 @@
package schema
const ProviderVersion = "1.23.0"

View File

@@ -0,0 +1,32 @@
package schema
type Providers struct {
Databricks *Config `json:"databricks,omitempty"`
}
func NewProviders() *Providers {
return &Providers{
Databricks: &Config{},
}
}
type Root struct {
Terraform map[string]any `json:"terraform"`
Provider *Providers `json:"provider,omitempty"`
Data *DataSources `json:"data,omitempty"`
Resource *Resources `json:"resource,omitempty"`
}
func NewRoot() *Root {
return &Root{
Terraform: map[string]interface{}{
"required_providers": map[string]interface{}{
"databricks": map[string]interface{}{
"source": "databricks/databricks",
"version": "1.23.0",
},
},
},
}
}
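
Marshalling the generated root shows the exact pin that ends up in the bundle's Terraform configuration. A trimmed, standalone sketch (the real Root also carries Provider, Data and Resource fields, omitted here for brevity):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the generated Root, kept only to show the terraform block.
type Root struct {
	Terraform map[string]any `json:"terraform"`
}

func NewRoot() *Root {
	return &Root{
		Terraform: map[string]any{
			"required_providers": map[string]any{
				"databricks": map[string]any{
					"source":  "databricks/databricks",
					"version": "1.23.0",
				},
			},
		},
	}
}

func main() {
	out, _ := json.MarshalIndent(NewRoot(), "", "  ")
	// The output pins required_providers.databricks.version to "1.23.0".
	fmt.Println(string(out))
}
```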

View File

@@ -90,6 +90,7 @@ type DataSourceClusterClusterInfoGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

View File

@@ -26,6 +26,7 @@ type DataSourceInstancePoolPoolInfoDiskSpec struct {
type DataSourceInstancePoolPoolInfoGcpAttributes struct {
GcpAvailability string `json:"gcp_availability,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
}
type DataSourceInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOption struct {

View File

@@ -124,6 +124,7 @@ type DataSourceJobJobSettingsSettingsJobClusterNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
@@ -305,6 +306,7 @@ type DataSourceJobJobSettingsSettingsNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
@@ -401,6 +403,11 @@ type DataSourceJobJobSettingsSettingsNotificationSettings struct {
NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
}
type DataSourceJobJobSettingsSettingsParameter struct {
Default string `json:"default,omitempty"`
Name string `json:"name,omitempty"`
}
type DataSourceJobJobSettingsSettingsPipelineTask struct {
FullRefresh bool `json:"full_refresh,omitempty"`
PipelineId string `json:"pipeline_id"`
@@ -421,6 +428,11 @@ type DataSourceJobJobSettingsSettingsRunAs struct {
UserName string `json:"user_name,omitempty"`
}
type DataSourceJobJobSettingsSettingsRunJobTask struct {
JobId string `json:"job_id"`
JobParameters map[string]string `json:"job_parameters,omitempty"`
}
type DataSourceJobJobSettingsSettingsSchedule struct {
PauseStatus string `json:"pause_status,omitempty"`
QuartzCronExpression string `json:"quartz_cron_expression"`
@@ -573,6 +585,7 @@ type DataSourceJobJobSettingsSettingsTaskNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
@@ -682,6 +695,11 @@ type DataSourceJobJobSettingsSettingsTaskPythonWheelTask struct {
Parameters []string `json:"parameters,omitempty"`
}
type DataSourceJobJobSettingsSettingsTaskRunJobTask struct {
JobId string `json:"job_id"`
JobParameters map[string]string `json:"job_parameters,omitempty"`
}
type DataSourceJobJobSettingsSettingsTaskSparkJarTask struct {
JarUri string `json:"jar_uri,omitempty"`
MainClassName string `json:"main_class_name,omitempty"`
@@ -760,6 +778,7 @@ type DataSourceJobJobSettingsSettingsTask struct {
NotificationSettings *DataSourceJobJobSettingsSettingsTaskNotificationSettings `json:"notification_settings,omitempty"`
PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
RunJobTask *DataSourceJobJobSettingsSettingsTaskRunJobTask `json:"run_job_task,omitempty"`
SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"`
SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
@@ -821,10 +840,12 @@ type DataSourceJobJobSettingsSettings struct {
NewCluster *DataSourceJobJobSettingsSettingsNewCluster `json:"new_cluster,omitempty"`
NotebookTask *DataSourceJobJobSettingsSettingsNotebookTask `json:"notebook_task,omitempty"`
NotificationSettings *DataSourceJobJobSettingsSettingsNotificationSettings `json:"notification_settings,omitempty"`
Parameter []DataSourceJobJobSettingsSettingsParameter `json:"parameter,omitempty"`
PipelineTask *DataSourceJobJobSettingsSettingsPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *DataSourceJobJobSettingsSettingsPythonWheelTask `json:"python_wheel_task,omitempty"`
Queue *DataSourceJobJobSettingsSettingsQueue `json:"queue,omitempty"`
RunAs *DataSourceJobJobSettingsSettingsRunAs `json:"run_as,omitempty"`
RunJobTask *DataSourceJobJobSettingsSettingsRunJobTask `json:"run_job_task,omitempty"`
Schedule *DataSourceJobJobSettingsSettingsSchedule `json:"schedule,omitempty"`
SparkJarTask *DataSourceJobJobSettingsSettingsSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *DataSourceJobJobSettingsSettingsSparkPythonTask `json:"spark_python_task,omitempty"`

View File

@@ -68,6 +68,7 @@ type ResourceClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
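
The recurring change across the cluster-related structs is the new local_ssd_count GCP attribute. A standalone sketch of how the field serializes, using a trimmed copy of the struct and made-up values:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of ResourceClusterGcpAttributes, reduced to a few fields.
type ResourceClusterGcpAttributes struct {
	Availability  string `json:"availability,omitempty"`
	LocalSsdCount int    `json:"local_ssd_count,omitempty"`
	ZoneId        string `json:"zone_id,omitempty"`
}

func main() {
	// Example values only.
	attrs := ResourceClusterGcpAttributes{
		Availability:  "ON_DEMAND_GCP",
		LocalSsdCount: 2,
		ZoneId:        "us-central1-a",
	}
	out, _ := json.Marshal(attrs)
	// {"availability":"ON_DEMAND_GCP","local_ssd_count":2,"zone_id":"us-central1-a"}
	fmt.Println(string(out))
}
```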

View File

@@ -0,0 +1,15 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type ResourceConnection struct {
Comment string `json:"comment,omitempty"`
ConnectionType string `json:"connection_type"`
Id string `json:"id,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
Name string `json:"name"`
Options map[string]string `json:"options"`
Owner string `json:"owner,omitempty"`
Properties map[string]string `json:"properties,omitempty"`
ReadOnly bool `json:"read_only,omitempty"`
}

View File

@@ -26,6 +26,7 @@ type ResourceInstancePoolDiskSpec struct {
type ResourceInstancePoolGcpAttributes struct {
GcpAvailability string `json:"gcp_availability,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
}
type ResourceInstancePoolInstancePoolFleetAttributesFleetOnDemandOption struct {

View File

@@ -124,6 +124,7 @@ type ResourceJobJobClusterNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
@@ -305,6 +306,7 @@ type ResourceJobNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
@@ -401,6 +403,11 @@ type ResourceJobNotificationSettings struct {
NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
}
type ResourceJobParameter struct {
Default string `json:"default,omitempty"`
Name string `json:"name,omitempty"`
}
type ResourceJobPipelineTask struct {
FullRefresh bool `json:"full_refresh,omitempty"`
PipelineId string `json:"pipeline_id"`
@@ -421,6 +428,11 @@ type ResourceJobRunAs struct {
UserName string `json:"user_name,omitempty"`
}
type ResourceJobRunJobTask struct {
JobId string `json:"job_id"`
JobParameters map[string]string `json:"job_parameters,omitempty"`
}
type ResourceJobSchedule struct {
PauseStatus string `json:"pause_status,omitempty"`
QuartzCronExpression string `json:"quartz_cron_expression"`
@@ -573,6 +585,7 @@ type ResourceJobTaskNewClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
BootDiskSize int `json:"boot_disk_size,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}
@@ -682,6 +695,11 @@ type ResourceJobTaskPythonWheelTask struct {
Parameters []string `json:"parameters,omitempty"`
}
type ResourceJobTaskRunJobTask struct {
JobId string `json:"job_id"`
JobParameters map[string]string `json:"job_parameters,omitempty"`
}
type ResourceJobTaskSparkJarTask struct {
JarUri string `json:"jar_uri,omitempty"`
MainClassName string `json:"main_class_name,omitempty"`
@@ -760,6 +778,7 @@ type ResourceJobTask struct {
NotificationSettings *ResourceJobTaskNotificationSettings `json:"notification_settings,omitempty"`
PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
RunJobTask *ResourceJobTaskRunJobTask `json:"run_job_task,omitempty"`
SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"`
SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
@@ -825,10 +844,12 @@ type ResourceJob struct {
NewCluster *ResourceJobNewCluster `json:"new_cluster,omitempty"`
NotebookTask *ResourceJobNotebookTask `json:"notebook_task,omitempty"`
NotificationSettings *ResourceJobNotificationSettings `json:"notification_settings,omitempty"`
Parameter []ResourceJobParameter `json:"parameter,omitempty"`
PipelineTask *ResourceJobPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *ResourceJobPythonWheelTask `json:"python_wheel_task,omitempty"`
Queue *ResourceJobQueue `json:"queue,omitempty"`
RunAs *ResourceJobRunAs `json:"run_as,omitempty"`
RunJobTask *ResourceJobRunJobTask `json:"run_job_task,omitempty"`
Schedule *ResourceJobSchedule `json:"schedule,omitempty"`
SparkJarTask *ResourceJobSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *ResourceJobSparkPythonTask `json:"spark_python_task,omitempty"`
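
The job schema gains parameter and run_job_task blocks. A trimmed, standalone sketch of how they would be populated and serialized; the struct copies below keep only the new fields plus a few surrounding fields (name, task, task_key) assumed from the full schema, and all values are made up:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copies of the generated structs, reduced for illustration.
type ResourceJobParameter struct {
	Default string `json:"default,omitempty"`
	Name    string `json:"name,omitempty"`
}

type ResourceJobTaskRunJobTask struct {
	JobId         string            `json:"job_id"`
	JobParameters map[string]string `json:"job_parameters,omitempty"`
}

type ResourceJobTask struct {
	TaskKey    string                     `json:"task_key,omitempty"`
	RunJobTask *ResourceJobTaskRunJobTask `json:"run_job_task,omitempty"`
}

type ResourceJob struct {
	Name      string                 `json:"name,omitempty"`
	Parameter []ResourceJobParameter `json:"parameter,omitempty"`
	Task      []ResourceJobTask      `json:"task,omitempty"`
}

func main() {
	job := ResourceJob{
		Name:      "example",
		Parameter: []ResourceJobParameter{{Name: "env", Default: "dev"}},
		Task: []ResourceJobTask{{
			TaskKey: "trigger_downstream",
			RunJobTask: &ResourceJobTaskRunJobTask{
				JobId:         "123",
				JobParameters: map[string]string{"env": "dev"},
			},
		}},
	}
	// Prints the job with the new parameter and run_job_task blocks populated.
	out, _ := json.MarshalIndent(job, "", "  ")
	fmt.Println(string(out))
}
```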

View File

@@ -4,6 +4,7 @@ package schema
type ResourceModelServingConfigServedModels struct {
EnvironmentVars map[string]string `json:"environment_vars,omitempty"`
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
ModelName string `json:"model_name"`
ModelVersion string `json:"model_version"`
Name string `json:"name,omitempty"`

View File

@@ -47,6 +47,7 @@ type ResourcePipelineClusterClusterLogConf struct {
type ResourcePipelineClusterGcpAttributes struct {
Availability string `json:"availability,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
LocalSsdCount int `json:"local_ssd_count,omitempty"`
ZoneId string `json:"zone_id,omitempty"`
}

View File

@@ -24,7 +24,7 @@ func NewRoot() *Root {
"required_providers": map[string]interface{}{
"databricks": map[string]interface{}{
"source": "databricks/databricks",
"version": ">= 1.0.0",
"version": "1.23.0",
},
},
},