# Upgrade TF provider to 1.62.0 (#2030)

## Changes
* Added support for `IsSingleNode`, `Kind`, and `UseMlRuntime` for clusters (see the first sketch below)
* Added support for `CleanRoomsNotebookTask` (second sketch below)
* `DaysOfWeek` for the pipeline restart window is now a list (third sketch below)
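
For illustration, a minimal sketch of the new cluster fields in use. The field names and JSON tags are copied from the diff below; the trimmed struct, the program scaffolding, and all values are hypothetical:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed stand-in for the generated ResourceCluster struct: only fields
// relevant to this change are kept, with JSON tags copied from the diff.
type ResourceCluster struct {
	ClusterName  string `json:"cluster_name,omitempty"`
	SparkVersion string `json:"spark_version"`
	IsSingleNode bool   `json:"is_single_node,omitempty"`
	Kind         string `json:"kind,omitempty"`
	UseMlRuntime bool   `json:"use_ml_runtime,omitempty"`
}

func main() {
	// Illustrative values; the new fields marshal into the Terraform JSON
	// payload through their struct tags like any other cluster attribute.
	c := ResourceCluster{
		ClusterName:  "single-node-ml",
		SparkVersion: "15.4.x-scala2.12", // hypothetical runtime label
		IsSingleNode: true,
		Kind:         "CLASSIC_PREVIEW", // hypothetical kind value
		UseMlRuntime: true,
	}
	out, err := json.MarshalIndent(c, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```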
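
Likewise, a hedged sketch of attaching the new `CleanRoomsNotebookTask` to a job task. The `ResourceJobTaskCleanRoomsNotebookTask` shape is copied from the diff; the two-field `ResourceJobTask` wrapper is trimmed for the example and all values are made up:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins trimmed from the generated job schema in the diff below.
type ResourceJobTaskCleanRoomsNotebookTask struct {
	CleanRoomName          string            `json:"clean_room_name"`
	Etag                   string            `json:"etag,omitempty"`
	NotebookBaseParameters map[string]string `json:"notebook_base_parameters,omitempty"`
	NotebookName           string            `json:"notebook_name"`
}

type ResourceJobTask struct {
	TaskKey                string                                 `json:"task_key"`
	CleanRoomsNotebookTask *ResourceJobTaskCleanRoomsNotebookTask `json:"clean_rooms_notebook_task,omitempty"`
}

func main() {
	// A task that runs a notebook shared into a clean room; all names and
	// parameters are illustrative.
	task := ResourceJobTask{
		TaskKey: "clean_room_example",
		CleanRoomsNotebookTask: &ResourceJobTaskCleanRoomsNotebookTask{
			CleanRoomName:          "my_clean_room",
			NotebookName:           "shared_notebook",
			NotebookBaseParameters: map[string]string{"run_date": "2024-12-18"},
		},
	}
	out, err := json.Marshal(task)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```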
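
And a sketch of the restart-window change: since `days_of_week` is now a list, a single day that was previously a plain string likely needs to be wrapped in a one-element slice. The struct shape is copied from the diff; the values are illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Shape copied from the diff below: in 1.62.0 DaysOfWeek is []string,
// where 1.61.0 declared it as a plain string.
type ResourcePipelineRestartWindow struct {
	DaysOfWeek []string `json:"days_of_week,omitempty"`
	StartHour  int      `json:"start_hour"`
	TimeZoneId string   `json:"time_zone_id,omitempty"`
}

func main() {
	// What was previously DaysOfWeek: "MONDAY" becomes a one-element list.
	w := ResourcePipelineRestartWindow{
		DaysOfWeek: []string{"MONDAY"},
		StartHour:  2,
		TimeZoneId: "UTC",
	}
	out, err := json.Marshal(w)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}
```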
Andrew Nester 2024-12-18 15:03:08 +01:00 committed by GitHub
parent 59f0859e00
commit e3b256e753
6 changed files with 99 additions and 62 deletions

@@ -1,3 +1,3 @@
 package schema
 
-const ProviderVersion = "1.61.0"
+const ProviderVersion = "1.62.0"

@@ -317,6 +317,8 @@ type DataSourceClusterClusterInfoSpec struct {
 EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
 IdempotencyToken string `json:"idempotency_token,omitempty"`
 InstancePoolId string `json:"instance_pool_id,omitempty"`
+IsSingleNode bool `json:"is_single_node,omitempty"`
+Kind string `json:"kind,omitempty"`
 NodeTypeId string `json:"node_type_id,omitempty"`
 NumWorkers int `json:"num_workers,omitempty"`
 PolicyId string `json:"policy_id,omitempty"`
@@ -326,6 +328,7 @@ type DataSourceClusterClusterInfoSpec struct {
 SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
 SparkVersion string `json:"spark_version"`
 SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
+UseMlRuntime bool `json:"use_ml_runtime,omitempty"`
 Autoscale *DataSourceClusterClusterInfoSpecAutoscale `json:"autoscale,omitempty"`
 AwsAttributes *DataSourceClusterClusterInfoSpecAwsAttributes `json:"aws_attributes,omitempty"`
 AzureAttributes *DataSourceClusterClusterInfoSpecAzureAttributes `json:"azure_attributes,omitempty"`
@@ -369,7 +372,9 @@ type DataSourceClusterClusterInfo struct {
 EnableElasticDisk bool `json:"enable_elastic_disk,omitempty"`
 EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
 InstancePoolId string `json:"instance_pool_id,omitempty"`
+IsSingleNode bool `json:"is_single_node,omitempty"`
 JdbcPort int `json:"jdbc_port,omitempty"`
+Kind string `json:"kind,omitempty"`
 LastRestartedTime int `json:"last_restarted_time,omitempty"`
 LastStateLossTime int `json:"last_state_loss_time,omitempty"`
 NodeTypeId string `json:"node_type_id,omitempty"`
@@ -386,6 +391,7 @@ type DataSourceClusterClusterInfo struct {
 State string `json:"state,omitempty"`
 StateMessage string `json:"state_message,omitempty"`
 TerminatedTime int `json:"terminated_time,omitempty"`
+UseMlRuntime bool `json:"use_ml_runtime,omitempty"`
 Autoscale *DataSourceClusterClusterInfoAutoscale `json:"autoscale,omitempty"`
 AwsAttributes *DataSourceClusterClusterInfoAwsAttributes `json:"aws_attributes,omitempty"`
 AzureAttributes *DataSourceClusterClusterInfoAzureAttributes `json:"azure_attributes,omitempty"`

@@ -176,6 +176,8 @@ type ResourceCluster struct {
 IdempotencyToken string `json:"idempotency_token,omitempty"`
 InstancePoolId string `json:"instance_pool_id,omitempty"`
 IsPinned bool `json:"is_pinned,omitempty"`
+IsSingleNode bool `json:"is_single_node,omitempty"`
+Kind string `json:"kind,omitempty"`
 NoWait bool `json:"no_wait,omitempty"`
 NodeTypeId string `json:"node_type_id,omitempty"`
 NumWorkers int `json:"num_workers,omitempty"`
@@ -188,6 +190,7 @@ type ResourceCluster struct {
 SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
 State string `json:"state,omitempty"`
 Url string `json:"url,omitempty"`
+UseMlRuntime bool `json:"use_ml_runtime,omitempty"`
 Autoscale *ResourceClusterAutoscale `json:"autoscale,omitempty"`
 AwsAttributes *ResourceClusterAwsAttributes `json:"aws_attributes,omitempty"`
 AzureAttributes *ResourceClusterAzureAttributes `json:"azure_attributes,omitempty"`

@@ -240,6 +240,8 @@ type ResourceJobJobClusterNewCluster struct {
 EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
 IdempotencyToken string `json:"idempotency_token,omitempty"`
 InstancePoolId string `json:"instance_pool_id,omitempty"`
+IsSingleNode bool `json:"is_single_node,omitempty"`
+Kind string `json:"kind,omitempty"`
 NodeTypeId string `json:"node_type_id,omitempty"`
 NumWorkers int `json:"num_workers,omitempty"`
 PolicyId string `json:"policy_id,omitempty"`
@@ -249,6 +251,7 @@ type ResourceJobJobClusterNewCluster struct {
 SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
 SparkVersion string `json:"spark_version"`
 SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
+UseMlRuntime bool `json:"use_ml_runtime,omitempty"`
 Autoscale *ResourceJobJobClusterNewClusterAutoscale `json:"autoscale,omitempty"`
 AwsAttributes *ResourceJobJobClusterNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
 AzureAttributes *ResourceJobJobClusterNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
@@ -462,6 +465,8 @@ type ResourceJobNewCluster struct {
 EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
 IdempotencyToken string `json:"idempotency_token,omitempty"`
 InstancePoolId string `json:"instance_pool_id,omitempty"`
+IsSingleNode bool `json:"is_single_node,omitempty"`
+Kind string `json:"kind,omitempty"`
 NodeTypeId string `json:"node_type_id,omitempty"`
 NumWorkers int `json:"num_workers,omitempty"`
 PolicyId string `json:"policy_id,omitempty"`
@@ -471,6 +476,7 @@ type ResourceJobNewCluster struct {
 SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
 SparkVersion string `json:"spark_version"`
 SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
+UseMlRuntime bool `json:"use_ml_runtime,omitempty"`
 Autoscale *ResourceJobNewClusterAutoscale `json:"autoscale,omitempty"`
 AwsAttributes *ResourceJobNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
 AzureAttributes *ResourceJobNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
@@ -548,6 +554,13 @@ type ResourceJobSparkSubmitTask struct {
 Parameters []string `json:"parameters,omitempty"`
 }
 
+type ResourceJobTaskCleanRoomsNotebookTask struct {
+CleanRoomName string `json:"clean_room_name"`
+Etag string `json:"etag,omitempty"`
+NotebookBaseParameters map[string]string `json:"notebook_base_parameters,omitempty"`
+NotebookName string `json:"notebook_name"`
+}
+
 type ResourceJobTaskConditionTask struct {
 Left string `json:"left"`
 Op string `json:"op"`
@@ -578,6 +591,13 @@ type ResourceJobTaskEmailNotifications struct {
 OnSuccess []string `json:"on_success,omitempty"`
 }
 
+type ResourceJobTaskForEachTaskTaskCleanRoomsNotebookTask struct {
+CleanRoomName string `json:"clean_room_name"`
+Etag string `json:"etag,omitempty"`
+NotebookBaseParameters map[string]string `json:"notebook_base_parameters,omitempty"`
+NotebookName string `json:"notebook_name"`
+}
+
 type ResourceJobTaskForEachTaskTaskConditionTask struct {
 Left string `json:"left"`
 Op string `json:"op"`
@@ -814,6 +834,8 @@ type ResourceJobTaskForEachTaskTaskNewCluster struct {
 EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
 IdempotencyToken string `json:"idempotency_token,omitempty"`
 InstancePoolId string `json:"instance_pool_id,omitempty"`
+IsSingleNode bool `json:"is_single_node,omitempty"`
+Kind string `json:"kind,omitempty"`
 NodeTypeId string `json:"node_type_id,omitempty"`
 NumWorkers int `json:"num_workers,omitempty"`
 PolicyId string `json:"policy_id,omitempty"`
@@ -823,6 +845,7 @@ type ResourceJobTaskForEachTaskTaskNewCluster struct {
 SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
 SparkVersion string `json:"spark_version"`
 SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
+UseMlRuntime bool `json:"use_ml_runtime,omitempty"`
 Autoscale *ResourceJobTaskForEachTaskTaskNewClusterAutoscale `json:"autoscale,omitempty"`
 AwsAttributes *ResourceJobTaskForEachTaskTaskNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
 AzureAttributes *ResourceJobTaskForEachTaskTaskNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
@@ -974,6 +997,7 @@ type ResourceJobTaskForEachTaskTask struct {
 RunIf string `json:"run_if,omitempty"`
 TaskKey string `json:"task_key"`
 TimeoutSeconds int `json:"timeout_seconds,omitempty"`
+CleanRoomsNotebookTask *ResourceJobTaskForEachTaskTaskCleanRoomsNotebookTask `json:"clean_rooms_notebook_task,omitempty"`
 ConditionTask *ResourceJobTaskForEachTaskTaskConditionTask `json:"condition_task,omitempty"`
 DbtTask *ResourceJobTaskForEachTaskTaskDbtTask `json:"dbt_task,omitempty"`
 DependsOn []ResourceJobTaskForEachTaskTaskDependsOn `json:"depends_on,omitempty"`
@@ -1205,6 +1229,8 @@ type ResourceJobTaskNewCluster struct {
 EnableLocalDiskEncryption bool `json:"enable_local_disk_encryption,omitempty"`
 IdempotencyToken string `json:"idempotency_token,omitempty"`
 InstancePoolId string `json:"instance_pool_id,omitempty"`
+IsSingleNode bool `json:"is_single_node,omitempty"`
+Kind string `json:"kind,omitempty"`
 NodeTypeId string `json:"node_type_id,omitempty"`
 NumWorkers int `json:"num_workers,omitempty"`
 PolicyId string `json:"policy_id,omitempty"`
@@ -1214,6 +1240,7 @@ type ResourceJobTaskNewCluster struct {
 SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
 SparkVersion string `json:"spark_version"`
 SshPublicKeys []string `json:"ssh_public_keys,omitempty"`
+UseMlRuntime bool `json:"use_ml_runtime,omitempty"`
 Autoscale *ResourceJobTaskNewClusterAutoscale `json:"autoscale,omitempty"`
 AwsAttributes *ResourceJobTaskNewClusterAwsAttributes `json:"aws_attributes,omitempty"`
 AzureAttributes *ResourceJobTaskNewClusterAzureAttributes `json:"azure_attributes,omitempty"`
@@ -1365,6 +1392,7 @@ type ResourceJobTask struct {
 RunIf string `json:"run_if,omitempty"`
 TaskKey string `json:"task_key"`
 TimeoutSeconds int `json:"timeout_seconds,omitempty"`
+CleanRoomsNotebookTask *ResourceJobTaskCleanRoomsNotebookTask `json:"clean_rooms_notebook_task,omitempty"`
 ConditionTask *ResourceJobTaskConditionTask `json:"condition_task,omitempty"`
 DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"`
 DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"`

@@ -244,7 +244,7 @@ type ResourcePipelineNotification struct {
 }
 
 type ResourcePipelineRestartWindow struct {
-DaysOfWeek string `json:"days_of_week,omitempty"`
+DaysOfWeek []string `json:"days_of_week,omitempty"`
 StartHour int `json:"start_hour"`
 TimeZoneId string `json:"time_zone_id,omitempty"`
 }

@@ -21,7 +21,7 @@ type Root struct {
 
 const ProviderHost = "registry.terraform.io"
 const ProviderSource = "databricks/databricks"
-const ProviderVersion = "1.61.0"
+const ProviderVersion = "1.62.0"
 
 func NewRoot() *Root {
 return &Root{