Revert "Rename jobs -> workflows" (#118)

This reverts PR #111.

This reverts commit 230811031f.
Authored by Pieter Noordhuis on 2022-12-01 22:39:15 +01:00; committed by GitHub.
parent cdc776d89e
commit d5474c9673
14 changed files with 31 additions and 31 deletions

View File

@@ -4,6 +4,6 @@ import "github.com/databricks/bricks/bundle/config/resources"
 
 // Resources defines Databricks resources associated with the bundle.
 type Resources struct {
-	Workflows map[string]resources.Workflow `json:"workflows,omitempty"`
+	Jobs      map[string]resources.Job      `json:"jobs,omitempty"`
 	Pipelines map[string]resources.Pipeline `json:"pipelines,omitempty"`
 }
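For context, a standalone sketch of the lookup pattern this struct supports: resources are maps keyed by the name the user assigns in YAML. The types here are simplified stand-ins, not the repo's own.

package main

import "fmt"

// Stand-in for resources.Job; the real type lives in
// github.com/databricks/bricks/bundle/config/resources.
type Job struct {
	ID string `json:"id,omitempty"`
}

// Stand-in for config.Resources after the revert: jobs are
// addressed as Resources.Jobs["<name from YAML>"].
type Resources struct {
	Jobs map[string]Job `json:"jobs,omitempty"`
}

func main() {
	r := Resources{Jobs: map[string]Job{"my_first_job": {ID: "1"}}}
	fmt.Println(r.Jobs["my_first_job"].ID) // prints: 1
}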

View File

@@ -2,7 +2,7 @@ package resources
 
 import "github.com/databricks/databricks-sdk-go/service/jobs"
 
-type Workflow struct {
+type Job struct {
 	ID string `json:"id,omitempty"`
 
 	*jobs.JobSettings
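The embedded *jobs.JobSettings pointer is what promotes fields such as Name onto Job, which the tests further down rely on (j.Name). A minimal standalone sketch of that promotion, with a stand-in settings type instead of the real SDK one:

package main

import "fmt"

// JobSettings is a stand-in for the SDK's jobs.JobSettings.
type JobSettings struct {
	Name string
}

// Job embeds a *JobSettings, so the settings' fields are
// promoted and readable directly on Job.
type Job struct {
	ID string
	*JobSettings
}

func main() {
	j := Job{ID: "1", JobSettings: &JobSettings{Name: "Daily refresh"}}
	fmt.Println(j.Name) // promoted from the embedded *JobSettings
}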

View File

@@ -36,7 +36,7 @@ type Root struct {
 	Artifacts map[string]*Artifact `json:"artifacts,omitempty"`
 
 	// Resources contains a description of all Databricks resources
-	// to deploy in this bundle (e.g. workflows, pipelines, etc.).
+	// to deploy in this bundle (e.g. jobs, pipelines, etc.).
 	Resources Resources `json:"resources"`
 
 	// Environments can be used to differentiate settings and resources between

View File

@@ -0,0 +1,4 @@
+resources:
+  jobs:
+    my_first_job:
+      id: 1

View File

@@ -1,4 +0,0 @@
-resources:
-  workflows:
-    my_first_workflow:
-      id: 1

View File

@@ -0,0 +1,4 @@
+resources:
+  jobs:
+    my_second_job:
+      id: 2

View File

@@ -1,4 +0,0 @@
-resources:
-  workflows:
-    my_second_workflow:
-      id: 2

View File

@@ -11,10 +11,10 @@ import (
 func TestIncludeDefault(t *testing.T) {
 	b := load(t, "./include_default")
 
-	// Test that both workflows were loaded.
-	keys := maps.Keys(b.Config.Resources.Workflows)
+	// Test that both jobs were loaded.
+	keys := maps.Keys(b.Config.Resources.Jobs)
 	sort.Strings(keys)
-	assert.Equal(t, []string{"my_first_workflow", "my_second_workflow"}, keys)
-	assert.Equal(t, "1", b.Config.Resources.Workflows["my_first_workflow"].ID)
-	assert.Equal(t, "2", b.Config.Resources.Workflows["my_second_workflow"].ID)
+	assert.Equal(t, []string{"my_first_job", "my_second_job"}, keys)
+	assert.Equal(t, "1", b.Config.Resources.Jobs["my_first_job"].ID)
+	assert.Equal(t, "2", b.Config.Resources.Jobs["my_second_job"].ID)
 }
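The sort.Strings call above matters because Go map iteration order is unspecified. A minimal standalone sketch of the same pattern, with keys as a stand-in for maps.Keys from golang.org/x/exp/maps:

package main

import (
	"fmt"
	"sort"
)

// keys collects a map's keys in arbitrary order; callers that need
// a stable result must sort, exactly as the test above does.
func keys[K comparable, V any](m map[K]V) []K {
	out := make([]K, 0, len(m))
	for k := range m {
		out = append(out, k)
	}
	return out
}

func main() {
	jobs := map[string]string{"my_second_job": "2", "my_first_job": "1"}
	ks := keys(jobs)
	sort.Strings(ks)
	fmt.Println(ks) // always [my_first_job my_second_job]
}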

View File

@@ -1,4 +1,4 @@
 resources:
-  workflows:
-    this_workflow_isnt_defined:
+  jobs:
+    this_job_isnt_defined:
       id: 1

View File

@@ -8,5 +8,5 @@ import (
 
 func TestIncludeOverride(t *testing.T) {
 	b := load(t, "./include_override")
-	assert.Empty(t, b.Config.Resources.Workflows)
+	assert.Empty(t, b.Config.Resources.Jobs)
 }

View File

@@ -29,7 +29,7 @@ environments:
       development: false
       photon: true
 
-  workflows:
+  jobs:
     pipeline_schedule:
       name: Daily refresh of production pipeline

View File

@@ -8,8 +8,8 @@ import (
 )
 
 func TestJobAndPipelineDevelopment(t *testing.T) {
-	b := loadEnvironment(t, "./workflow_and_pipeline", "development")
-	assert.Len(t, b.Config.Resources.Workflows, 0)
+	b := loadEnvironment(t, "./job_and_pipeline", "development")
+	assert.Len(t, b.Config.Resources.Jobs, 0)
 	assert.Len(t, b.Config.Resources.Pipelines, 1)
 
 	p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
@@ -20,8 +20,8 @@ func TestJobAndPipelineDevelopment(t *testing.T) {
 }
 
 func TestJobAndPipelineStaging(t *testing.T) {
-	b := loadEnvironment(t, "./workflow_and_pipeline", "staging")
-	assert.Len(t, b.Config.Resources.Workflows, 0)
+	b := loadEnvironment(t, "./job_and_pipeline", "staging")
+	assert.Len(t, b.Config.Resources.Jobs, 0)
 	assert.Len(t, b.Config.Resources.Pipelines, 1)
 
 	p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
@@ -32,8 +32,8 @@ func TestJobAndPipelineStaging(t *testing.T) {
 }
 
 func TestJobAndPipelineProduction(t *testing.T) {
-	b := loadEnvironment(t, "./workflow_and_pipeline", "production")
-	assert.Len(t, b.Config.Resources.Workflows, 1)
+	b := loadEnvironment(t, "./job_and_pipeline", "production")
+	assert.Len(t, b.Config.Resources.Jobs, 1)
 	assert.Len(t, b.Config.Resources.Pipelines, 1)
 
 	p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
@@ -42,7 +42,7 @@ func TestJobAndPipelineProduction(t *testing.T) {
 	assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
 	assert.Equal(t, "nyc_taxi_production", p.Target)
 
-	j := b.Config.Resources.Workflows["pipeline_schedule"]
+	j := b.Config.Resources.Jobs["pipeline_schedule"]
 	assert.Equal(t, "Daily refresh of production pipeline", j.Name)
 	require.Len(t, j.Tasks, 1)
 	assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId)

View File

@@ -2,8 +2,8 @@ bundle:
   name: yaml_anchors
 
 resources:
-  workflows:
-    my_workflow:
+  jobs:
+    my_job:
       _: &common_cluster
         spark_version: "10.4.x-scala2.12"
       tasks:
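The "_" key above exists only to hold the &common_cluster anchor so that each task can alias it. A hedged standalone sketch, assuming gopkg.in/yaml.v3, showing that every alias decodes to the anchored mapping:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// doc mimics the anchor trick from the config above; the "_" key is
// never read back, it just defines &common_cluster for reuse.
const doc = `
_: &common_cluster
  spark_version: "10.4.x-scala2.12"
tasks:
  - new_cluster: *common_cluster
  - new_cluster: *common_cluster
`

func main() {
	var v struct {
		Tasks []struct {
			NewCluster map[string]string `yaml:"new_cluster"`
		} `yaml:"tasks"`
	}
	if err := yaml.Unmarshal([]byte(doc), &v); err != nil {
		panic(err)
	}
	// Both tasks received the same expanded cluster settings.
	fmt.Println(v.Tasks[0].NewCluster["spark_version"]) // 10.4.x-scala2.12
	fmt.Println(v.Tasks[1].NewCluster["spark_version"]) // 10.4.x-scala2.12
}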

View File

@ -9,9 +9,9 @@ import (
func TestYAMLAnchors(t *testing.T) {
b := load(t, "./yaml_anchors")
assert.Len(t, b.Config.Resources.Workflows, 1)
assert.Len(t, b.Config.Resources.Jobs, 1)
j := b.Config.Resources.Workflows["my_workflow"]
j := b.Config.Resources.Jobs["my_job"]
require.Len(t, j.Tasks, 2)
t0 := j.Tasks[0]