From ce9c9148c96449a57b476659311c33bb843adc67 Mon Sep 17 00:00:00 2001
From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com>
Date: Thu, 3 Aug 2023 13:20:30 +0200
Subject: [PATCH] Regenerate bundle resource structs from latest terraform provider (#633)

## Changes
This PR:
1. Regenerates the terraform provider structs based on the latest terraform provider version: 1.22.0
2. Adds a debug launch configuration for regenerating the schema

## Tests
Existing unit tests
---
 bundle/internal/tf/codegen/.gitignore | 1 +
 bundle/internal/tf/schema/data_source_job.go | 131 +++++++++++------
 .../tf/schema/data_source_metastore.go | 30 ++++
 .../tf/schema/data_source_metastores.go | 8 ++
 .../tf/schema/data_source_sql_warehouse.go | 2 +-
 bundle/internal/tf/schema/data_sources.go | 4 +
 .../resource_access_control_rule_set.go | 15 ++
 bundle/internal/tf/schema/resource_group.go | 1 +
 bundle/internal/tf/schema/resource_job.go | 132 ++++++++++++------
 .../tf/schema/resource_service_principal.go | 1 +
 bundle/internal/tf/schema/resource_user.go | 1 +
 bundle/internal/tf/schema/resources.go | 2 +
 12 files changed, 241 insertions(+), 87 deletions(-)
 create mode 100644 bundle/internal/tf/schema/data_source_metastore.go
 create mode 100644 bundle/internal/tf/schema/data_source_metastores.go
 create mode 100644 bundle/internal/tf/schema/resource_access_control_rule_set.go

diff --git a/bundle/internal/tf/codegen/.gitignore b/bundle/internal/tf/codegen/.gitignore
index d59e6e95..72f05fc4 100644
--- a/bundle/internal/tf/codegen/.gitignore
+++ b/bundle/internal/tf/codegen/.gitignore
@@ -1,2 +1,3 @@
 /codegen
 /tmp
+/.vscode
diff --git a/bundle/internal/tf/schema/data_source_job.go b/bundle/internal/tf/schema/data_source_job.go
index a633bd3a..6d2d1aa9 100644
--- a/bundle/internal/tf/schema/data_source_job.go
+++ b/bundle/internal/tf/schema/data_source_job.go
@@ -25,19 +25,37 @@ type DataSourceJobJobSettingsSettingsDbtTask struct {
 }

 type DataSourceJobJobSettingsSettingsEmailNotifications struct {
-	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
-	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
-	OnFailure []string `json:"on_failure,omitempty"`
-	OnStart []string `json:"on_start,omitempty"`
-	OnSuccess []string `json:"on_success,omitempty"`
+	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
+	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
+	OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
+	OnFailure []string `json:"on_failure,omitempty"`
+	OnStart []string `json:"on_start,omitempty"`
+	OnSuccess []string `json:"on_success,omitempty"`
+}
+
+type DataSourceJobJobSettingsSettingsGitSourceJobSource struct {
+	DirtyState string `json:"dirty_state,omitempty"`
+	ImportFromGitBranch string `json:"import_from_git_branch"`
+	JobConfigPath string `json:"job_config_path"`
 }

 type DataSourceJobJobSettingsSettingsGitSource struct {
-	Branch string `json:"branch,omitempty"`
-	Commit string `json:"commit,omitempty"`
-	Provider string `json:"provider,omitempty"`
-	Tag string `json:"tag,omitempty"`
-	Url string `json:"url"`
+	Branch string `json:"branch,omitempty"`
+	Commit string `json:"commit,omitempty"`
+	Provider string `json:"provider,omitempty"`
+	Tag string `json:"tag,omitempty"`
+	Url string `json:"url"`
+	JobSource *DataSourceJobJobSettingsSettingsGitSourceJobSource `json:"job_source,omitempty"`
+}
+
+type DataSourceJobJobSettingsSettingsHealthRules struct {
+	Metric string `json:"metric,omitempty"`
+	Op string `json:"op,omitempty"`
+	Value int `json:"value,omitempty"`
+}
+
+type DataSourceJobJobSettingsSettingsHealth struct {
+	Rules []DataSourceJobJobSettingsSettingsHealthRules `json:"rules,omitempty"`
 }

 type DataSourceJobJobSettingsSettingsJobClusterNewClusterAutoscale struct {
@@ -384,7 +402,8 @@ type DataSourceJobJobSettingsSettingsNotificationSettings struct {
 }

 type DataSourceJobJobSettingsSettingsPipelineTask struct {
-	PipelineId string `json:"pipeline_id"`
+	FullRefresh bool `json:"full_refresh,omitempty"`
+	PipelineId string `json:"pipeline_id"`
 }

 type DataSourceJobJobSettingsSettingsPythonWheelTask struct {
@@ -445,11 +464,22 @@ type DataSourceJobJobSettingsSettingsTaskDependsOn struct {
 }

 type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct {
-	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
-	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
-	OnFailure []string `json:"on_failure,omitempty"`
-	OnStart []string `json:"on_start,omitempty"`
-	OnSuccess []string `json:"on_success,omitempty"`
+	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
+	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
+	OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
+	OnFailure []string `json:"on_failure,omitempty"`
+	OnStart []string `json:"on_start,omitempty"`
+	OnSuccess []string `json:"on_success,omitempty"`
+}
+
+type DataSourceJobJobSettingsSettingsTaskHealthRules struct {
+	Metric string `json:"metric,omitempty"`
+	Op string `json:"op,omitempty"`
+	Value int `json:"value,omitempty"`
+}
+
+type DataSourceJobJobSettingsSettingsTaskHealth struct {
+	Rules []DataSourceJobJobSettingsSettingsTaskHealthRules `json:"rules,omitempty"`
 }

 type DataSourceJobJobSettingsSettingsTaskLibraryCran struct {
@@ -634,8 +664,15 @@ type DataSourceJobJobSettingsSettingsTaskNotebookTask struct {
 	Source string `json:"source,omitempty"`
 }

+type DataSourceJobJobSettingsSettingsTaskNotificationSettings struct {
+	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
+	NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"`
+	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
+}
+
 type DataSourceJobJobSettingsSettingsTaskPipelineTask struct {
-	PipelineId string `json:"pipeline_id"`
+	FullRefresh bool `json:"full_refresh,omitempty"`
+	PipelineId string `json:"pipeline_id"`
 }

 type DataSourceJobJobSettingsSettingsTaskPythonWheelTask struct {
@@ -702,29 +739,31 @@ type DataSourceJobJobSettingsSettingsTaskSqlTask struct {
 }

 type DataSourceJobJobSettingsSettingsTask struct {
-	ComputeKey string `json:"compute_key,omitempty"`
-	Description string `json:"description,omitempty"`
-	ExistingClusterId string `json:"existing_cluster_id,omitempty"`
-	JobClusterKey string `json:"job_cluster_key,omitempty"`
-	MaxRetries int `json:"max_retries,omitempty"`
-	MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
-	RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
-	RunIf string `json:"run_if,omitempty"`
-	TaskKey string `json:"task_key,omitempty"`
-	TimeoutSeconds int `json:"timeout_seconds,omitempty"`
-	ConditionTask *DataSourceJobJobSettingsSettingsTaskConditionTask `json:"condition_task,omitempty"`
-	DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"`
-	DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"`
-	EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"`
-	Library []DataSourceJobJobSettingsSettingsTaskLibrary `json:"library,omitempty"`
-	NewCluster *DataSourceJobJobSettingsSettingsTaskNewCluster `json:"new_cluster,omitempty"`
-	NotebookTask *DataSourceJobJobSettingsSettingsTaskNotebookTask `json:"notebook_task,omitempty"`
-	PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"`
-	PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
-	SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"`
-	SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"`
-	SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
-	SqlTask *DataSourceJobJobSettingsSettingsTaskSqlTask `json:"sql_task,omitempty"`
+	ComputeKey string `json:"compute_key,omitempty"`
+	Description string `json:"description,omitempty"`
+	ExistingClusterId string `json:"existing_cluster_id,omitempty"`
+	JobClusterKey string `json:"job_cluster_key,omitempty"`
+	MaxRetries int `json:"max_retries,omitempty"`
+	MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
+	RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
+	RunIf string `json:"run_if,omitempty"`
+	TaskKey string `json:"task_key,omitempty"`
+	TimeoutSeconds int `json:"timeout_seconds,omitempty"`
+	ConditionTask *DataSourceJobJobSettingsSettingsTaskConditionTask `json:"condition_task,omitempty"`
+	DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"`
+	DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"`
+	EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"`
+	Health *DataSourceJobJobSettingsSettingsTaskHealth `json:"health,omitempty"`
+	Library []DataSourceJobJobSettingsSettingsTaskLibrary `json:"library,omitempty"`
+	NewCluster *DataSourceJobJobSettingsSettingsTaskNewCluster `json:"new_cluster,omitempty"`
+	NotebookTask *DataSourceJobJobSettingsSettingsTaskNotebookTask `json:"notebook_task,omitempty"`
+	NotificationSettings *DataSourceJobJobSettingsSettingsTaskNotificationSettings `json:"notification_settings,omitempty"`
+	PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"`
+	PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
+	SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"`
+	SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"`
+	SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
+	SqlTask *DataSourceJobJobSettingsSettingsTaskSqlTask `json:"sql_task,omitempty"`
 }

 type DataSourceJobJobSettingsSettingsTriggerFileArrival struct {
@@ -738,6 +777,10 @@ type DataSourceJobJobSettingsSettingsTrigger struct {
 	FileArrival *DataSourceJobJobSettingsSettingsTriggerFileArrival `json:"file_arrival,omitempty"`
 }

+type DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded struct {
+	Id string `json:"id"`
+}
+
 type DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure struct {
 	Id string `json:"id"`
 }
@@ -751,9 +794,10 @@ type DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess struct {
 }

 type DataSourceJobJobSettingsSettingsWebhookNotifications struct {
-	OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
-	OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"`
-	OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
+	OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
+	OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
+	OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"`
+	OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
 }

 type DataSourceJobJobSettingsSettings struct {
@@ -771,6 +815,7 @@
 	DbtTask *DataSourceJobJobSettingsSettingsDbtTask `json:"dbt_task,omitempty"`
 	EmailNotifications *DataSourceJobJobSettingsSettingsEmailNotifications `json:"email_notifications,omitempty"`
 	GitSource *DataSourceJobJobSettingsSettingsGitSource `json:"git_source,omitempty"`
+	Health *DataSourceJobJobSettingsSettingsHealth `json:"health,omitempty"`
 	JobCluster []DataSourceJobJobSettingsSettingsJobCluster `json:"job_cluster,omitempty"`
 	Library []DataSourceJobJobSettingsSettingsLibrary `json:"library,omitempty"`
 	NewCluster *DataSourceJobJobSettingsSettingsNewCluster `json:"new_cluster,omitempty"`
diff --git a/bundle/internal/tf/schema/data_source_metastore.go b/bundle/internal/tf/schema/data_source_metastore.go
new file mode 100644
index 00000000..dd14be81
--- /dev/null
+++ b/bundle/internal/tf/schema/data_source_metastore.go
@@ -0,0 +1,30 @@
+// Generated from Databricks Terraform provider schema. DO NOT EDIT.
+
+package schema
+
+type DataSourceMetastoreMetastoreInfo struct {
+	Cloud string `json:"cloud,omitempty"`
+	CreatedAt int `json:"created_at,omitempty"`
+	CreatedBy string `json:"created_by,omitempty"`
+	DefaultDataAccessConfigId string `json:"default_data_access_config_id,omitempty"`
+	DeltaSharingOrganizationName string `json:"delta_sharing_organization_name,omitempty"`
+	DeltaSharingRecipientTokenLifetimeInSeconds int `json:"delta_sharing_recipient_token_lifetime_in_seconds,omitempty"`
+	DeltaSharingScope string `json:"delta_sharing_scope,omitempty"`
+	GlobalMetastoreId string `json:"global_metastore_id,omitempty"`
+	MetastoreId string `json:"metastore_id,omitempty"`
+	Name string `json:"name,omitempty"`
+	Owner string `json:"owner,omitempty"`
+	PrivilegeModelVersion string `json:"privilege_model_version,omitempty"`
+	Region string `json:"region,omitempty"`
+	StorageRoot string `json:"storage_root,omitempty"`
+	StorageRootCredentialId string `json:"storage_root_credential_id,omitempty"`
+	StorageRootCredentialName string `json:"storage_root_credential_name,omitempty"`
+	UpdatedAt int `json:"updated_at,omitempty"`
+	UpdatedBy string `json:"updated_by,omitempty"`
+}
+
+type DataSourceMetastore struct {
+	Id string `json:"id,omitempty"`
+	MetastoreId string `json:"metastore_id"`
+	MetastoreInfo *DataSourceMetastoreMetastoreInfo `json:"metastore_info,omitempty"`
+}
diff --git a/bundle/internal/tf/schema/data_source_metastores.go b/bundle/internal/tf/schema/data_source_metastores.go
new file mode 100644
index 00000000..c2b6854e
--- /dev/null
+++ b/bundle/internal/tf/schema/data_source_metastores.go
@@ -0,0 +1,8 @@
+// Generated from Databricks Terraform provider schema. DO NOT EDIT.
+
+package schema
+
+type DataSourceMetastores struct {
+	Id string `json:"id,omitempty"`
+	Ids map[string]string `json:"ids,omitempty"`
+}
diff --git a/bundle/internal/tf/schema/data_source_sql_warehouse.go b/bundle/internal/tf/schema/data_source_sql_warehouse.go
index f90cc9dd..218591d0 100644
--- a/bundle/internal/tf/schema/data_source_sql_warehouse.go
+++ b/bundle/internal/tf/schema/data_source_sql_warehouse.go
@@ -29,7 +29,7 @@ type DataSourceSqlWarehouse struct {
 	DataSourceId string `json:"data_source_id,omitempty"`
 	EnablePhoton bool `json:"enable_photon,omitempty"`
 	EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
-	Id string `json:"id"`
+	Id string `json:"id,omitempty"`
 	InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
 	JdbcUrl string `json:"jdbc_url,omitempty"`
 	MaxNumClusters int `json:"max_num_clusters,omitempty"`
diff --git a/bundle/internal/tf/schema/data_sources.go b/bundle/internal/tf/schema/data_sources.go
index 6fbcf680..79658298 100644
--- a/bundle/internal/tf/schema/data_sources.go
+++ b/bundle/internal/tf/schema/data_sources.go
@@ -18,6 +18,8 @@ type DataSources struct {
 	InstancePool map[string]*DataSourceInstancePool `json:"databricks_instance_pool,omitempty"`
 	Job map[string]*DataSourceJob `json:"databricks_job,omitempty"`
 	Jobs map[string]*DataSourceJobs `json:"databricks_jobs,omitempty"`
+	Metastore map[string]*DataSourceMetastore `json:"databricks_metastore,omitempty"`
+	Metastores map[string]*DataSourceMetastores `json:"databricks_metastores,omitempty"`
 	MwsCredentials map[string]*DataSourceMwsCredentials `json:"databricks_mws_credentials,omitempty"`
 	MwsWorkspaces map[string]*DataSourceMwsWorkspaces `json:"databricks_mws_workspaces,omitempty"`
 	NodeType map[string]*DataSourceNodeType `json:"databricks_node_type,omitempty"`
@@ -55,6 +57,8 @@ func NewDataSources() *DataSources {
 		InstancePool: make(map[string]*DataSourceInstancePool),
 		Job: make(map[string]*DataSourceJob),
 		Jobs: make(map[string]*DataSourceJobs),
+		Metastore: make(map[string]*DataSourceMetastore),
+		Metastores: make(map[string]*DataSourceMetastores),
 		MwsCredentials: make(map[string]*DataSourceMwsCredentials),
 		MwsWorkspaces: make(map[string]*DataSourceMwsWorkspaces),
 		NodeType: make(map[string]*DataSourceNodeType),
diff --git a/bundle/internal/tf/schema/resource_access_control_rule_set.go b/bundle/internal/tf/schema/resource_access_control_rule_set.go
new file mode 100644
index 00000000..775c0708
--- /dev/null
+++ b/bundle/internal/tf/schema/resource_access_control_rule_set.go
@@ -0,0 +1,15 @@
+// Generated from Databricks Terraform provider schema. DO NOT EDIT.
+
+package schema
+
+type ResourceAccessControlRuleSetGrantRules struct {
+	Principals []string `json:"principals,omitempty"`
+	Role string `json:"role"`
+}
+
+type ResourceAccessControlRuleSet struct {
+	Etag string `json:"etag,omitempty"`
+	Id string `json:"id,omitempty"`
+	Name string `json:"name"`
+	GrantRules []ResourceAccessControlRuleSetGrantRules `json:"grant_rules,omitempty"`
+}
diff --git a/bundle/internal/tf/schema/resource_group.go b/bundle/internal/tf/schema/resource_group.go
index 252d2087..7d7860f5 100644
--- a/bundle/internal/tf/schema/resource_group.go
+++ b/bundle/internal/tf/schema/resource_group.go
@@ -3,6 +3,7 @@
 package schema

 type ResourceGroup struct {
+	AclPrincipalId string `json:"acl_principal_id,omitempty"`
 	AllowClusterCreate bool `json:"allow_cluster_create,omitempty"`
 	AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
 	DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"`
diff --git a/bundle/internal/tf/schema/resource_job.go b/bundle/internal/tf/schema/resource_job.go
index e3137ea1..77b681ee 100644
--- a/bundle/internal/tf/schema/resource_job.go
+++ b/bundle/internal/tf/schema/resource_job.go
@@ -25,19 +25,37 @@ type ResourceJobDbtTask struct {
 }

 type ResourceJobEmailNotifications struct {
-	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
-	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
-	OnFailure []string `json:"on_failure,omitempty"`
-	OnStart []string `json:"on_start,omitempty"`
-	OnSuccess []string `json:"on_success,omitempty"`
+	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
+	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
+	OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
+	OnFailure []string `json:"on_failure,omitempty"`
+	OnStart []string `json:"on_start,omitempty"`
+	OnSuccess []string `json:"on_success,omitempty"`
+}
+
+type ResourceJobGitSourceJobSource struct {
+	DirtyState string `json:"dirty_state,omitempty"`
+	ImportFromGitBranch string `json:"import_from_git_branch"`
+	JobConfigPath string `json:"job_config_path"`
 }

 type ResourceJobGitSource struct {
-	Branch string `json:"branch,omitempty"`
-	Commit string `json:"commit,omitempty"`
-	Provider string `json:"provider,omitempty"`
-	Tag string `json:"tag,omitempty"`
-	Url string `json:"url"`
+	Branch string `json:"branch,omitempty"`
+	Commit string `json:"commit,omitempty"`
+	Provider string `json:"provider,omitempty"`
+	Tag string `json:"tag,omitempty"`
+	Url string `json:"url"`
+	JobSource *ResourceJobGitSourceJobSource `json:"job_source,omitempty"`
+}
+
+type ResourceJobHealthRules struct {
+	Metric string `json:"metric,omitempty"`
+	Op string `json:"op,omitempty"`
+	Value int `json:"value,omitempty"`
+}
+
+type ResourceJobHealth struct {
+	Rules []ResourceJobHealthRules `json:"rules,omitempty"`
 }

 type ResourceJobJobClusterNewClusterAutoscale struct {
@@ -384,7 +402,8 @@ type ResourceJobNotificationSettings struct {
 }

 type ResourceJobPipelineTask struct {
-	PipelineId string `json:"pipeline_id"`
+	FullRefresh bool `json:"full_refresh,omitempty"`
+	PipelineId string `json:"pipeline_id"`
 }

 type ResourceJobPythonWheelTask struct {
@@ -445,11 +464,22 @@ type ResourceJobTaskDependsOn struct {
 }

 type ResourceJobTaskEmailNotifications struct {
-	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
-	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
-	OnFailure []string `json:"on_failure,omitempty"`
-	OnStart []string `json:"on_start,omitempty"`
-	OnSuccess []string `json:"on_success,omitempty"`
+	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
+	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
+	OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
+	OnFailure []string `json:"on_failure,omitempty"`
+	OnStart []string `json:"on_start,omitempty"`
+	OnSuccess []string `json:"on_success,omitempty"`
+}
+
+type ResourceJobTaskHealthRules struct {
+	Metric string `json:"metric,omitempty"`
+	Op string `json:"op,omitempty"`
+	Value int `json:"value,omitempty"`
+}
+
+type ResourceJobTaskHealth struct {
+	Rules []ResourceJobTaskHealthRules `json:"rules,omitempty"`
 }

 type ResourceJobTaskLibraryCran struct {
@@ -634,8 +664,15 @@ type ResourceJobTaskNotebookTask struct {
 	Source string `json:"source,omitempty"`
 }

+type ResourceJobTaskNotificationSettings struct {
+	AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"`
+	NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"`
+	NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
+}
+
 type ResourceJobTaskPipelineTask struct {
-	PipelineId string `json:"pipeline_id"`
+	FullRefresh bool `json:"full_refresh,omitempty"`
+	PipelineId string `json:"pipeline_id"`
 }

 type ResourceJobTaskPythonWheelTask struct {
@@ -702,29 +739,31 @@ type ResourceJobTaskSqlTask struct {
 }

 type ResourceJobTask struct {
-	ComputeKey string `json:"compute_key,omitempty"`
-	Description string `json:"description,omitempty"`
-	ExistingClusterId string `json:"existing_cluster_id,omitempty"`
-	JobClusterKey string `json:"job_cluster_key,omitempty"`
-	MaxRetries int `json:"max_retries,omitempty"`
-	MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
-	RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
-	RunIf string `json:"run_if,omitempty"`
-	TaskKey string `json:"task_key,omitempty"`
-	TimeoutSeconds int `json:"timeout_seconds,omitempty"`
-	ConditionTask *ResourceJobTaskConditionTask `json:"condition_task,omitempty"`
-	DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"`
-	DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"`
-	EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"`
-	Library []ResourceJobTaskLibrary `json:"library,omitempty"`
-	NewCluster *ResourceJobTaskNewCluster `json:"new_cluster,omitempty"`
-	NotebookTask *ResourceJobTaskNotebookTask `json:"notebook_task,omitempty"`
-	PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"`
-	PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
-	SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"`
-	SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"`
-	SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
-	SqlTask *ResourceJobTaskSqlTask `json:"sql_task,omitempty"`
+	ComputeKey string `json:"compute_key,omitempty"`
+	Description string `json:"description,omitempty"`
+	ExistingClusterId string `json:"existing_cluster_id,omitempty"`
+	JobClusterKey string `json:"job_cluster_key,omitempty"`
+	MaxRetries int `json:"max_retries,omitempty"`
+	MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
+	RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
+	RunIf string `json:"run_if,omitempty"`
+	TaskKey string `json:"task_key,omitempty"`
+	TimeoutSeconds int `json:"timeout_seconds,omitempty"`
+	ConditionTask *ResourceJobTaskConditionTask `json:"condition_task,omitempty"`
+	DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"`
+	DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"`
+	EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"`
+	Health *ResourceJobTaskHealth `json:"health,omitempty"`
+	Library []ResourceJobTaskLibrary `json:"library,omitempty"`
+	NewCluster *ResourceJobTaskNewCluster `json:"new_cluster,omitempty"`
+	NotebookTask *ResourceJobTaskNotebookTask `json:"notebook_task,omitempty"`
+	NotificationSettings *ResourceJobTaskNotificationSettings `json:"notification_settings,omitempty"`
+	PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"`
+	PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"`
+	SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"`
+	SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"`
+	SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"`
+	SqlTask *ResourceJobTaskSqlTask `json:"sql_task,omitempty"`
 }

 type ResourceJobTriggerFileArrival struct {
@@ -738,6 +777,10 @@ type ResourceJobTrigger struct {
 	FileArrival *ResourceJobTriggerFileArrival `json:"file_arrival,omitempty"`
 }

+type ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded struct {
+	Id string `json:"id"`
+}
+
 type ResourceJobWebhookNotificationsOnFailure struct {
 	Id string `json:"id"`
 }
@@ -751,13 +794,15 @@ type ResourceJobWebhookNotificationsOnSuccess struct {
 }

 type ResourceJobWebhookNotifications struct {
-	OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
-	OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"`
-	OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
+	OnDurationWarningThresholdExceeded []ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
+	OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
+	OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"`
+	OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
 }

 type ResourceJob struct {
 	AlwaysRunning bool `json:"always_running,omitempty"`
+	ControlRunState bool `json:"control_run_state,omitempty"`
 	ExistingClusterId string `json:"existing_cluster_id,omitempty"`
 	Format string `json:"format,omitempty"`
 	Id string `json:"id,omitempty"`
@@ -774,6 +819,7 @@
 	DbtTask *ResourceJobDbtTask `json:"dbt_task,omitempty"`
 	EmailNotifications *ResourceJobEmailNotifications `json:"email_notifications,omitempty"`
 	GitSource *ResourceJobGitSource `json:"git_source,omitempty"`
+	Health *ResourceJobHealth `json:"health,omitempty"`
 	JobCluster []ResourceJobJobCluster `json:"job_cluster,omitempty"`
 	Library []ResourceJobLibrary `json:"library,omitempty"`
 	NewCluster *ResourceJobNewCluster `json:"new_cluster,omitempty"`
diff --git a/bundle/internal/tf/schema/resource_service_principal.go b/bundle/internal/tf/schema/resource_service_principal.go
index bdbce227..5e9943a1 100644
--- a/bundle/internal/tf/schema/resource_service_principal.go
+++ b/bundle/internal/tf/schema/resource_service_principal.go
@@ -3,6 +3,7 @@
 package schema

 type ResourceServicePrincipal struct {
+	AclPrincipalId string `json:"acl_principal_id,omitempty"`
 	Active bool `json:"active,omitempty"`
 	AllowClusterCreate bool `json:"allow_cluster_create,omitempty"`
 	AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
diff --git a/bundle/internal/tf/schema/resource_user.go b/bundle/internal/tf/schema/resource_user.go
index b9644093..2fe57b8b 100644
--- a/bundle/internal/tf/schema/resource_user.go
+++ b/bundle/internal/tf/schema/resource_user.go
@@ -3,6 +3,7 @@
 package schema

 type ResourceUser struct {
+	AclPrincipalId string `json:"acl_principal_id,omitempty"`
 	Active bool `json:"active,omitempty"`
 	AllowClusterCreate bool `json:"allow_cluster_create,omitempty"`
 	AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"`
diff --git a/bundle/internal/tf/schema/resources.go b/bundle/internal/tf/schema/resources.go
index 7a0c2eb8..c2361254 100644
--- a/bundle/internal/tf/schema/resources.go
+++ b/bundle/internal/tf/schema/resources.go
@@ -3,6 +3,7 @@
 package schema

 type Resources struct {
+	AccessControlRuleSet map[string]*ResourceAccessControlRuleSet `json:"databricks_access_control_rule_set,omitempty"`
 	AwsS3Mount map[string]*ResourceAwsS3Mount `json:"databricks_aws_s3_mount,omitempty"`
 	AzureAdlsGen1Mount map[string]*ResourceAzureAdlsGen1Mount `json:"databricks_azure_adls_gen1_mount,omitempty"`
 	AzureAdlsGen2Mount map[string]*ResourceAzureAdlsGen2Mount `json:"databricks_azure_adls_gen2_mount,omitempty"`
@@ -82,6 +83,7 @@ type Resources struct {

 func NewResources() *Resources {
 	return &Resources{
+		AccessControlRuleSet: make(map[string]*ResourceAccessControlRuleSet),
 		AwsS3Mount: make(map[string]*ResourceAwsS3Mount),
 		AzureAdlsGen1Mount: make(map[string]*ResourceAzureAdlsGen1Mount),
 		AzureAdlsGen2Mount: make(map[string]*ResourceAzureAdlsGen2Mount),
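---

Editor's note: the sketch below is not part of the patch. It is a minimal example of how the regenerated structs might be exercised, assuming it is compiled inside the CLI repository module so the internal package is importable as `github.com/databricks/cli/bundle/internal/tf/schema`; the rule-set name, principal, role, and health-rule metric/op/value are illustrative values, not taken from the diff.

```go
package main

import (
	"encoding/json"
	"fmt"

	// Assumed import path for the generated schema package touched by this patch.
	"github.com/databricks/cli/bundle/internal/tf/schema"
)

func main() {
	// NewResources now also pre-allocates the databricks_access_control_rule_set
	// map added in this regeneration, so entries can be assigned directly.
	res := schema.NewResources()
	res.AccessControlRuleSet["example"] = &schema.ResourceAccessControlRuleSet{
		Name: "accounts/<account-id>/ruleSets/default", // illustrative name
		GrantRules: []schema.ResourceAccessControlRuleSetGrantRules{
			{
				Principals: []string{"users/someone@example.com"}, // illustrative principal
				Role:       "roles/servicePrincipal.user",         // illustrative role
			},
		},
	}

	// The regenerated job struct exposes the new health block.
	job := schema.ResourceJob{
		Health: &schema.ResourceJobHealth{
			Rules: []schema.ResourceJobHealthRules{
				{Metric: "RUN_DURATION_SECONDS", Op: "GREATER_THAN", Value: 3600}, // illustrative rule
			},
		},
	}

	// Marshal both values to inspect the JSON keys the new fields contribute.
	rulesJSON, _ := json.MarshalIndent(res, "", "  ")
	jobJSON, _ := json.MarshalIndent(job, "", "  ")
	fmt.Println(string(rulesJSON))
	fmt.Println(string(jobJSON))
}
```

Printing the marshaled values is just a quick way to see the `databricks_access_control_rule_set` and `health` keys that the regenerated structs add to the Terraform JSON emitted by the bundle.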