Regenerated internal schema structs based on Terraform provider schemas (#401)

## Changes
Regenerated internal schema structs based on Terraform provider schemas

Allows use of the `serverless` flag in bundle config.

## Tests
Ran `cli bundle deploy` with a bundle containing a pipeline whose
`serverless` key is set to `true`.
This commit is contained in:
Andrew Nester 2023-05-23 19:33:24 +02:00 committed by GitHub
parent 842a559424
commit 273271bc59
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 241 additions and 69 deletions

View File

@ -12,16 +12,17 @@ type Config struct {
AzureTenantId string `json:"azure_tenant_id,omitempty"`
AzureUseMsi bool `json:"azure_use_msi,omitempty"`
AzureWorkspaceResourceId string `json:"azure_workspace_resource_id,omitempty"`
DatabricksCliPath string `json:"databricks_cli_path,omitempty"`
ClientId string `json:"client_id,omitempty"`
ClientSecret string `json:"client_secret,omitempty"`
ConfigFile string `json:"config_file,omitempty"`
DatabricksCliPath string `json:"databricks_cli_path,omitempty"`
DebugHeaders bool `json:"debug_headers,omitempty"`
DebugTruncateBytes int `json:"debug_truncate_bytes,omitempty"`
GoogleCredentials string `json:"google_credentials,omitempty"`
GoogleServiceAccount string `json:"google_service_account,omitempty"`
Host string `json:"host,omitempty"`
HttpTimeoutSeconds int `json:"http_timeout_seconds,omitempty"`
MetadataServiceUrl string `json:"metadata_service_url,omitempty"`
Password string `json:"password,omitempty"`
Profile string `json:"profile,omitempty"`
RateLimit int `json:"rate_limit,omitempty"`

View File

@ -120,12 +120,17 @@ type DataSourceClusterClusterInfoInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type DataSourceClusterClusterInfoInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type DataSourceClusterClusterInfoInitScripts struct {
Abfss *DataSourceClusterClusterInfoInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *DataSourceClusterClusterInfoInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceClusterClusterInfoInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceClusterClusterInfoInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceClusterClusterInfoInitScriptsS3 `json:"s3,omitempty"`
Abfss *DataSourceClusterClusterInfoInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *DataSourceClusterClusterInfoInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceClusterClusterInfoInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceClusterClusterInfoInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceClusterClusterInfoInitScriptsS3 `json:"s3,omitempty"`
Workspace *DataSourceClusterClusterInfoInitScriptsWorkspace `json:"workspace,omitempty"`
}
type DataSourceClusterClusterInfoTerminationReason struct {

View File

@ -6,5 +6,5 @@ type DataSourceClusterPolicy struct {
Definition string `json:"definition,omitempty"`
Id string `json:"id,omitempty"`
MaxClustersPerUser int `json:"max_clusters_per_user,omitempty"`
Name string `json:"name"`
Name string `json:"name,omitempty"`
}

View File

@ -127,12 +127,17 @@ type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScripts struct {
Abfss *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3 `json:"s3,omitempty"`
Abfss *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsS3 `json:"s3,omitempty"`
Workspace *DataSourceJobJobSettingsSettingsJobClusterNewClusterInitScriptsWorkspace `json:"workspace,omitempty"`
}
type DataSourceJobJobSettingsSettingsJobClusterNewClusterWorkloadTypeClients struct {
@ -303,12 +308,17 @@ type DataSourceJobJobSettingsSettingsNewClusterInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type DataSourceJobJobSettingsSettingsNewClusterInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type DataSourceJobJobSettingsSettingsNewClusterInitScripts struct {
Abfss *DataSourceJobJobSettingsSettingsNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *DataSourceJobJobSettingsSettingsNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsNewClusterInitScriptsS3 `json:"s3,omitempty"`
Abfss *DataSourceJobJobSettingsSettingsNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *DataSourceJobJobSettingsSettingsNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsNewClusterInitScriptsS3 `json:"s3,omitempty"`
Workspace *DataSourceJobJobSettingsSettingsNewClusterInitScriptsWorkspace `json:"workspace,omitempty"`
}
type DataSourceJobJobSettingsSettingsNewClusterWorkloadTypeClients struct {
@ -359,6 +369,11 @@ type DataSourceJobJobSettingsSettingsNotebookTask struct {
Source string `json:"source,omitempty"`
}
type DataSourceJobJobSettingsSettingsNotificationSettings struct {
NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"`
NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
}
type DataSourceJobJobSettingsSettingsPipelineTask struct {
PipelineId string `json:"pipeline_id"`
}
@ -370,6 +385,9 @@ type DataSourceJobJobSettingsSettingsPythonWheelTask struct {
Parameters []string `json:"parameters,omitempty"`
}
type DataSourceJobJobSettingsSettingsQueue struct {
}
type DataSourceJobJobSettingsSettingsSchedule struct {
PauseStatus string `json:"pause_status,omitempty"`
QuartzCronExpression string `json:"quartz_cron_expression"`
@ -385,6 +403,7 @@ type DataSourceJobJobSettingsSettingsSparkJarTask struct {
type DataSourceJobJobSettingsSettingsSparkPythonTask struct {
Parameters []string `json:"parameters,omitempty"`
PythonFile string `json:"python_file"`
Source string `json:"source,omitempty"`
}
type DataSourceJobJobSettingsSettingsSparkSubmitTask struct {
@ -533,12 +552,17 @@ type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type DataSourceJobJobSettingsSettingsTaskNewClusterInitScripts struct {
Abfss *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsS3 `json:"s3,omitempty"`
Abfss *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsS3 `json:"s3,omitempty"`
Workspace *DataSourceJobJobSettingsSettingsTaskNewClusterInitScriptsWorkspace `json:"workspace,omitempty"`
}
type DataSourceJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients struct {
@ -609,6 +633,7 @@ type DataSourceJobJobSettingsSettingsTaskSparkJarTask struct {
type DataSourceJobJobSettingsSettingsTaskSparkPythonTask struct {
Parameters []string `json:"parameters,omitempty"`
PythonFile string `json:"python_file"`
Source string `json:"source,omitempty"`
}
type DataSourceJobJobSettingsSettingsTaskSparkSubmitTask struct {
@ -623,6 +648,10 @@ type DataSourceJobJobSettingsSettingsTaskSqlTaskDashboard struct {
DashboardId string `json:"dashboard_id"`
}
type DataSourceJobJobSettingsSettingsTaskSqlTaskFile struct {
Path string `json:"path"`
}
type DataSourceJobJobSettingsSettingsTaskSqlTaskQuery struct {
QueryId string `json:"query_id"`
}
@ -632,6 +661,7 @@ type DataSourceJobJobSettingsSettingsTaskSqlTask struct {
WarehouseId string `json:"warehouse_id,omitempty"`
Alert *DataSourceJobJobSettingsSettingsTaskSqlTaskAlert `json:"alert,omitempty"`
Dashboard *DataSourceJobJobSettingsSettingsTaskSqlTaskDashboard `json:"dashboard,omitempty"`
File *DataSourceJobJobSettingsSettingsTaskSqlTaskFile `json:"file,omitempty"`
Query *DataSourceJobJobSettingsSettingsTaskSqlTaskQuery `json:"query,omitempty"`
}
@ -642,6 +672,7 @@ type DataSourceJobJobSettingsSettingsTask struct {
MaxRetries int `json:"max_retries,omitempty"`
MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
RunIf string `json:"run_if,omitempty"`
TaskKey string `json:"task_key,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"`
@ -658,6 +689,17 @@ type DataSourceJobJobSettingsSettingsTask struct {
SqlTask *DataSourceJobJobSettingsSettingsTaskSqlTask `json:"sql_task,omitempty"`
}
type DataSourceJobJobSettingsSettingsTriggerFileArrival struct {
MinTimeBetweenTriggerSeconds int `json:"min_time_between_trigger_seconds,omitempty"`
Url string `json:"url"`
WaitAfterLastChangeSeconds int `json:"wait_after_last_change_seconds,omitempty"`
}
type DataSourceJobJobSettingsSettingsTrigger struct {
PauseStatus string `json:"pause_status,omitempty"`
FileArrival *DataSourceJobJobSettingsSettingsTriggerFileArrival `json:"file_arrival,omitempty"`
}
type DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure struct {
Id string `json:"id"`
}
@ -694,13 +736,16 @@ type DataSourceJobJobSettingsSettings struct {
Library []DataSourceJobJobSettingsSettingsLibrary `json:"library,omitempty"`
NewCluster *DataSourceJobJobSettingsSettingsNewCluster `json:"new_cluster,omitempty"`
NotebookTask *DataSourceJobJobSettingsSettingsNotebookTask `json:"notebook_task,omitempty"`
NotificationSettings *DataSourceJobJobSettingsSettingsNotificationSettings `json:"notification_settings,omitempty"`
PipelineTask *DataSourceJobJobSettingsSettingsPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *DataSourceJobJobSettingsSettingsPythonWheelTask `json:"python_wheel_task,omitempty"`
Queue *DataSourceJobJobSettingsSettingsQueue `json:"queue,omitempty"`
Schedule *DataSourceJobJobSettingsSettingsSchedule `json:"schedule,omitempty"`
SparkJarTask *DataSourceJobJobSettingsSettingsSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *DataSourceJobJobSettingsSettingsSparkPythonTask `json:"spark_python_task,omitempty"`
SparkSubmitTask *DataSourceJobJobSettingsSettingsSparkSubmitTask `json:"spark_submit_task,omitempty"`
Task []DataSourceJobJobSettingsSettingsTask `json:"task,omitempty"`
Trigger *DataSourceJobJobSettingsSettingsTrigger `json:"trigger,omitempty"`
WebhookNotifications *DataSourceJobJobSettingsSettingsWebhookNotifications `json:"webhook_notifications,omitempty"`
}

View File

@ -0,0 +1,9 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourcePipelines struct {
Id string `json:"id,omitempty"`
Ids []string `json:"ids,omitempty"`
PipelineName string `json:"pipeline_name,omitempty"`
}

View File

@ -23,6 +23,7 @@ type DataSources struct {
NodeType map[string]*DataSourceNodeType `json:"databricks_node_type,omitempty"`
Notebook map[string]*DataSourceNotebook `json:"databricks_notebook,omitempty"`
NotebookPaths map[string]*DataSourceNotebookPaths `json:"databricks_notebook_paths,omitempty"`
Pipelines map[string]*DataSourcePipelines `json:"databricks_pipelines,omitempty"`
Schemas map[string]*DataSourceSchemas `json:"databricks_schemas,omitempty"`
ServicePrincipal map[string]*DataSourceServicePrincipal `json:"databricks_service_principal,omitempty"`
ServicePrincipals map[string]*DataSourceServicePrincipals `json:"databricks_service_principals,omitempty"`
@ -59,6 +60,7 @@ func NewDataSources() *DataSources {
NodeType: make(map[string]*DataSourceNodeType),
Notebook: make(map[string]*DataSourceNotebook),
NotebookPaths: make(map[string]*DataSourceNotebookPaths),
Pipelines: make(map[string]*DataSourcePipelines),
Schemas: make(map[string]*DataSourceSchemas),
ServicePrincipal: make(map[string]*DataSourceServicePrincipal),
ServicePrincipals: make(map[string]*DataSourceServicePrincipals),

View File

@ -98,12 +98,17 @@ type ResourceClusterInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type ResourceClusterInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type ResourceClusterInitScripts struct {
Abfss *ResourceClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourceClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceClusterInitScriptsS3 `json:"s3,omitempty"`
Abfss *ResourceClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourceClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceClusterInitScriptsS3 `json:"s3,omitempty"`
Workspace *ResourceClusterInitScriptsWorkspace `json:"workspace,omitempty"`
}
type ResourceClusterLibraryCran struct {

View File

@ -3,9 +3,12 @@
package schema
type ResourceClusterPolicy struct {
Definition string `json:"definition"`
Id string `json:"id,omitempty"`
MaxClustersPerUser int `json:"max_clusters_per_user,omitempty"`
Name string `json:"name"`
PolicyId string `json:"policy_id,omitempty"`
Definition string `json:"definition,omitempty"`
Description string `json:"description,omitempty"`
Id string `json:"id,omitempty"`
MaxClustersPerUser int `json:"max_clusters_per_user,omitempty"`
Name string `json:"name"`
PolicyFamilyDefinitionOverrides string `json:"policy_family_definition_overrides,omitempty"`
PolicyFamilyId string `json:"policy_family_id,omitempty"`
PolicyId string `json:"policy_id,omitempty"`
}

View File

@ -127,12 +127,17 @@ type ResourceJobJobClusterNewClusterInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type ResourceJobJobClusterNewClusterInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type ResourceJobJobClusterNewClusterInitScripts struct {
Abfss *ResourceJobJobClusterNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourceJobJobClusterNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobJobClusterNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobJobClusterNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobJobClusterNewClusterInitScriptsS3 `json:"s3,omitempty"`
Abfss *ResourceJobJobClusterNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourceJobJobClusterNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobJobClusterNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobJobClusterNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobJobClusterNewClusterInitScriptsS3 `json:"s3,omitempty"`
Workspace *ResourceJobJobClusterNewClusterInitScriptsWorkspace `json:"workspace,omitempty"`
}
type ResourceJobJobClusterNewClusterWorkloadTypeClients struct {
@ -303,12 +308,17 @@ type ResourceJobNewClusterInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type ResourceJobNewClusterInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type ResourceJobNewClusterInitScripts struct {
Abfss *ResourceJobNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourceJobNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobNewClusterInitScriptsS3 `json:"s3,omitempty"`
Abfss *ResourceJobNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourceJobNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobNewClusterInitScriptsS3 `json:"s3,omitempty"`
Workspace *ResourceJobNewClusterInitScriptsWorkspace `json:"workspace,omitempty"`
}
type ResourceJobNewClusterWorkloadTypeClients struct {
@ -359,6 +369,11 @@ type ResourceJobNotebookTask struct {
Source string `json:"source,omitempty"`
}
type ResourceJobNotificationSettings struct {
NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"`
NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"`
}
type ResourceJobPipelineTask struct {
PipelineId string `json:"pipeline_id"`
}
@ -370,6 +385,9 @@ type ResourceJobPythonWheelTask struct {
Parameters []string `json:"parameters,omitempty"`
}
type ResourceJobQueue struct {
}
type ResourceJobSchedule struct {
PauseStatus string `json:"pause_status,omitempty"`
QuartzCronExpression string `json:"quartz_cron_expression"`
@ -385,6 +403,7 @@ type ResourceJobSparkJarTask struct {
type ResourceJobSparkPythonTask struct {
Parameters []string `json:"parameters,omitempty"`
PythonFile string `json:"python_file"`
Source string `json:"source,omitempty"`
}
type ResourceJobSparkSubmitTask struct {
@ -533,12 +552,17 @@ type ResourceJobTaskNewClusterInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type ResourceJobTaskNewClusterInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type ResourceJobTaskNewClusterInitScripts struct {
Abfss *ResourceJobTaskNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourceJobTaskNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobTaskNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobTaskNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobTaskNewClusterInitScriptsS3 `json:"s3,omitempty"`
Abfss *ResourceJobTaskNewClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourceJobTaskNewClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourceJobTaskNewClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourceJobTaskNewClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourceJobTaskNewClusterInitScriptsS3 `json:"s3,omitempty"`
Workspace *ResourceJobTaskNewClusterInitScriptsWorkspace `json:"workspace,omitempty"`
}
type ResourceJobTaskNewClusterWorkloadTypeClients struct {
@ -609,6 +633,7 @@ type ResourceJobTaskSparkJarTask struct {
type ResourceJobTaskSparkPythonTask struct {
Parameters []string `json:"parameters,omitempty"`
PythonFile string `json:"python_file"`
Source string `json:"source,omitempty"`
}
type ResourceJobTaskSparkSubmitTask struct {
@ -623,6 +648,10 @@ type ResourceJobTaskSqlTaskDashboard struct {
DashboardId string `json:"dashboard_id"`
}
type ResourceJobTaskSqlTaskFile struct {
Path string `json:"path"`
}
type ResourceJobTaskSqlTaskQuery struct {
QueryId string `json:"query_id"`
}
@ -632,6 +661,7 @@ type ResourceJobTaskSqlTask struct {
WarehouseId string `json:"warehouse_id,omitempty"`
Alert *ResourceJobTaskSqlTaskAlert `json:"alert,omitempty"`
Dashboard *ResourceJobTaskSqlTaskDashboard `json:"dashboard,omitempty"`
File *ResourceJobTaskSqlTaskFile `json:"file,omitempty"`
Query *ResourceJobTaskSqlTaskQuery `json:"query,omitempty"`
}
@ -642,6 +672,7 @@ type ResourceJobTask struct {
MaxRetries int `json:"max_retries,omitempty"`
MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"`
RetryOnTimeout bool `json:"retry_on_timeout,omitempty"`
RunIf string `json:"run_if,omitempty"`
TaskKey string `json:"task_key,omitempty"`
TimeoutSeconds int `json:"timeout_seconds,omitempty"`
DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"`
@ -658,6 +689,17 @@ type ResourceJobTask struct {
SqlTask *ResourceJobTaskSqlTask `json:"sql_task,omitempty"`
}
type ResourceJobTriggerFileArrival struct {
MinTimeBetweenTriggerSeconds int `json:"min_time_between_trigger_seconds,omitempty"`
Url string `json:"url"`
WaitAfterLastChangeSeconds int `json:"wait_after_last_change_seconds,omitempty"`
}
type ResourceJobTrigger struct {
PauseStatus string `json:"pause_status,omitempty"`
FileArrival *ResourceJobTriggerFileArrival `json:"file_arrival,omitempty"`
}
type ResourceJobWebhookNotificationsOnFailure struct {
Id string `json:"id"`
}
@ -697,12 +739,15 @@ type ResourceJob struct {
Library []ResourceJobLibrary `json:"library,omitempty"`
NewCluster *ResourceJobNewCluster `json:"new_cluster,omitempty"`
NotebookTask *ResourceJobNotebookTask `json:"notebook_task,omitempty"`
NotificationSettings *ResourceJobNotificationSettings `json:"notification_settings,omitempty"`
PipelineTask *ResourceJobPipelineTask `json:"pipeline_task,omitempty"`
PythonWheelTask *ResourceJobPythonWheelTask `json:"python_wheel_task,omitempty"`
Queue *ResourceJobQueue `json:"queue,omitempty"`
Schedule *ResourceJobSchedule `json:"schedule,omitempty"`
SparkJarTask *ResourceJobSparkJarTask `json:"spark_jar_task,omitempty"`
SparkPythonTask *ResourceJobSparkPythonTask `json:"spark_python_task,omitempty"`
SparkSubmitTask *ResourceJobSparkSubmitTask `json:"spark_submit_task,omitempty"`
Task []ResourceJobTask `json:"task,omitempty"`
Trigger *ResourceJobTrigger `json:"trigger,omitempty"`
WebhookNotifications *ResourceJobWebhookNotifications `json:"webhook_notifications,omitempty"`
}

View File

@ -30,5 +30,7 @@ type ResourcePermissions struct {
SqlDashboardId string `json:"sql_dashboard_id,omitempty"`
SqlEndpointId string `json:"sql_endpoint_id,omitempty"`
SqlQueryId string `json:"sql_query_id,omitempty"`
WorkspaceFileId string `json:"workspace_file_id,omitempty"`
WorkspaceFilePath string `json:"workspace_file_path,omitempty"`
AccessControl []ResourcePermissionsAccessControl `json:"access_control,omitempty"`
}

View File

@ -76,12 +76,17 @@ type ResourcePipelineClusterInitScriptsS3 struct {
Region string `json:"region,omitempty"`
}
type ResourcePipelineClusterInitScriptsWorkspace struct {
Destination string `json:"destination,omitempty"`
}
type ResourcePipelineClusterInitScripts struct {
Abfss *ResourcePipelineClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourcePipelineClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourcePipelineClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourcePipelineClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourcePipelineClusterInitScriptsS3 `json:"s3,omitempty"`
Abfss *ResourcePipelineClusterInitScriptsAbfss `json:"abfss,omitempty"`
Dbfs *ResourcePipelineClusterInitScriptsDbfs `json:"dbfs,omitempty"`
File *ResourcePipelineClusterInitScriptsFile `json:"file,omitempty"`
Gcs *ResourcePipelineClusterInitScriptsGcs `json:"gcs,omitempty"`
S3 *ResourcePipelineClusterInitScriptsS3 `json:"s3,omitempty"`
Workspace *ResourcePipelineClusterInitScriptsWorkspace `json:"workspace,omitempty"`
}
type ResourcePipelineCluster struct {
@ -133,21 +138,28 @@ type ResourcePipelineLibrary struct {
Notebook *ResourcePipelineLibraryNotebook `json:"notebook,omitempty"`
}
type ResourcePipeline struct {
AllowDuplicateNames bool `json:"allow_duplicate_names,omitempty"`
Catalog string `json:"catalog,omitempty"`
Channel string `json:"channel,omitempty"`
Configuration map[string]string `json:"configuration,omitempty"`
Continuous bool `json:"continuous,omitempty"`
Development bool `json:"development,omitempty"`
Edition string `json:"edition,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name,omitempty"`
Photon bool `json:"photon,omitempty"`
Storage string `json:"storage,omitempty"`
Target string `json:"target,omitempty"`
Url string `json:"url,omitempty"`
Cluster []ResourcePipelineCluster `json:"cluster,omitempty"`
Filters *ResourcePipelineFilters `json:"filters,omitempty"`
Library []ResourcePipelineLibrary `json:"library,omitempty"`
type ResourcePipelineNotification struct {
Alerts []string `json:"alerts"`
EmailRecipients []string `json:"email_recipients"`
}
type ResourcePipeline struct {
AllowDuplicateNames bool `json:"allow_duplicate_names,omitempty"`
Catalog string `json:"catalog,omitempty"`
Channel string `json:"channel,omitempty"`
Configuration map[string]string `json:"configuration,omitempty"`
Continuous bool `json:"continuous,omitempty"`
Development bool `json:"development,omitempty"`
Edition string `json:"edition,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name,omitempty"`
Photon bool `json:"photon,omitempty"`
Serverless bool `json:"serverless,omitempty"`
Storage string `json:"storage,omitempty"`
Target string `json:"target,omitempty"`
Url string `json:"url,omitempty"`
Cluster []ResourcePipelineCluster `json:"cluster,omitempty"`
Filters *ResourcePipelineFilters `json:"filters,omitempty"`
Library []ResourcePipelineLibrary `json:"library,omitempty"`
Notification []ResourcePipelineNotification `json:"notification,omitempty"`
}

View File

@ -0,0 +1,26 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type ResourceSqlTableColumn struct {
Comment string `json:"comment,omitempty"`
Name string `json:"name"`
Nullable bool `json:"nullable,omitempty"`
Type string `json:"type"`
}
type ResourceSqlTable struct {
CatalogName string `json:"catalog_name"`
ClusterId string `json:"cluster_id,omitempty"`
Comment string `json:"comment,omitempty"`
DataSourceFormat string `json:"data_source_format,omitempty"`
Id string `json:"id,omitempty"`
Name string `json:"name"`
Properties map[string]string `json:"properties,omitempty"`
SchemaName string `json:"schema_name"`
StorageCredentialName string `json:"storage_credential_name,omitempty"`
StorageLocation string `json:"storage_location,omitempty"`
TableType string `json:"table_type"`
ViewDefinition string `json:"view_definition,omitempty"`
Column []ResourceSqlTableColumn `json:"column,omitempty"`
}

View File

@ -0,0 +1,13 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type ResourceWorkspaceFile struct {
ContentBase64 string `json:"content_base64,omitempty"`
Id string `json:"id,omitempty"`
Md5 string `json:"md5,omitempty"`
ObjectId int `json:"object_id,omitempty"`
Path string `json:"path"`
Source string `json:"source,omitempty"`
Url string `json:"url,omitempty"`
}

View File

@ -65,6 +65,7 @@ type Resources struct {
SqlGlobalConfig map[string]*ResourceSqlGlobalConfig `json:"databricks_sql_global_config,omitempty"`
SqlPermissions map[string]*ResourceSqlPermissions `json:"databricks_sql_permissions,omitempty"`
SqlQuery map[string]*ResourceSqlQuery `json:"databricks_sql_query,omitempty"`
SqlTable map[string]*ResourceSqlTable `json:"databricks_sql_table,omitempty"`
SqlVisualization map[string]*ResourceSqlVisualization `json:"databricks_sql_visualization,omitempty"`
SqlWidget map[string]*ResourceSqlWidget `json:"databricks_sql_widget,omitempty"`
StorageCredential map[string]*ResourceStorageCredential `json:"databricks_storage_credential,omitempty"`
@ -74,6 +75,7 @@ type Resources struct {
UserInstanceProfile map[string]*ResourceUserInstanceProfile `json:"databricks_user_instance_profile,omitempty"`
UserRole map[string]*ResourceUserRole `json:"databricks_user_role,omitempty"`
WorkspaceConf map[string]*ResourceWorkspaceConf `json:"databricks_workspace_conf,omitempty"`
WorkspaceFile map[string]*ResourceWorkspaceFile `json:"databricks_workspace_file,omitempty"`
}
func NewResources() *Resources {
@ -140,6 +142,7 @@ func NewResources() *Resources {
SqlGlobalConfig: make(map[string]*ResourceSqlGlobalConfig),
SqlPermissions: make(map[string]*ResourceSqlPermissions),
SqlQuery: make(map[string]*ResourceSqlQuery),
SqlTable: make(map[string]*ResourceSqlTable),
SqlVisualization: make(map[string]*ResourceSqlVisualization),
SqlWidget: make(map[string]*ResourceSqlWidget),
StorageCredential: make(map[string]*ResourceStorageCredential),
@ -149,5 +152,6 @@ func NewResources() *Resources {
UserInstanceProfile: make(map[string]*ResourceUserInstanceProfile),
UserRole: make(map[string]*ResourceUserRole),
WorkspaceConf: make(map[string]*ResourceWorkspaceConf),
WorkspaceFile: make(map[string]*ResourceWorkspaceFile),
}
}