Revert "Rename jobs -> workflows" (#118)
This reverts PR #111.
This reverts commit 230811031f.

parent cdc776d89e
commit d5474c9673
@@ -4,6 +4,6 @@ import "github.com/databricks/bricks/bundle/config/resources"
 
 // Resources defines Databricks resources associated with the bundle.
 type Resources struct {
-    Workflows map[string]resources.Workflow `json:"workflows,omitempty"`
+    Jobs map[string]resources.Job `json:"jobs,omitempty"`
     Pipelines map[string]resources.Pipeline `json:"pipelines,omitempty"`
 }
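For reference, the struct tags above are what tie the `jobs` key in a bundle configuration to the `Jobs` map. A minimal, self-contained sketch of that mapping, using stand-in types and plain `encoding/json` rather than the bundle's actual loader (which is not part of this diff):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins for the types in this diff; the real definitions live in
// bundle/config and bundle/config/resources.
type Job struct {
	ID string `json:"id,omitempty"`
}

type Resources struct {
	Jobs map[string]Job `json:"jobs,omitempty"`
}

func main() {
	// After the revert, resources are keyed by "jobs" again.
	raw := []byte(`{"jobs": {"my_first_job": {"id": "1"}}}`)

	var r Resources
	if err := json.Unmarshal(raw, &r); err != nil {
		panic(err)
	}
	fmt.Println(r.Jobs["my_first_job"].ID) // prints "1"
}
```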
@@ -2,7 +2,7 @@ package resources
 
 import "github.com/databricks/databricks-sdk-go/service/jobs"
 
-type Workflow struct {
+type Job struct {
     ID string `json:"id,omitempty"`
 
     *jobs.JobSettings
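The reverted `Job` resource embeds a pointer to the SDK's `jobs.JobSettings`, so the settings fields are promoted onto the resource alongside the bundle-managed `ID`. A rough sketch of that promotion, with a stand-in `JobSettings` so the snippet stays self-contained:

```go
package main

import "fmt"

// JobSettings is a stand-in for jobs.JobSettings from
// databricks-sdk-go; only a single field is shown here.
type JobSettings struct {
	Name string
}

// Job mirrors the resource above: a bundle-managed ID plus embedded
// job settings.
type Job struct {
	ID string `json:"id,omitempty"`

	*JobSettings
}

func main() {
	j := Job{ID: "123", JobSettings: &JobSettings{Name: "nightly"}}
	// The embedded pointer's fields are promoted, so j.Name reads
	// straight from JobSettings.
	fmt.Println(j.ID, j.Name)
}
```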
@@ -36,7 +36,7 @@ type Root struct {
     Artifacts map[string]*Artifact `json:"artifacts,omitempty"`
 
     // Resources contains a description of all Databricks resources
-    // to deploy in this bundle (e.g. workflows, pipelines, etc.).
+    // to deploy in this bundle (e.g. jobs, pipelines, etc.).
     Resources Resources `json:"resources"`
 
     // Environments can be used to differentiate settings and resources between
@@ -0,0 +1,4 @@
+resources:
+  jobs:
+    my_first_job:
+      id: 1
@@ -1,4 +0,0 @@
-resources:
-  workflows:
-    my_first_workflow:
-      id: 1
@@ -0,0 +1,4 @@
+resources:
+  jobs:
+    my_second_job:
+      id: 2
@@ -1,4 +0,0 @@
-resources:
-  workflows:
-    my_second_workflow:
-      id: 2
@@ -11,10 +11,10 @@ import (
 func TestIncludeDefault(t *testing.T) {
     b := load(t, "./include_default")
 
-    // Test that both workflows were loaded.
-    keys := maps.Keys(b.Config.Resources.Workflows)
+    // Test that both jobs were loaded.
+    keys := maps.Keys(b.Config.Resources.Jobs)
     sort.Strings(keys)
-    assert.Equal(t, []string{"my_first_workflow", "my_second_workflow"}, keys)
-    assert.Equal(t, "1", b.Config.Resources.Workflows["my_first_workflow"].ID)
-    assert.Equal(t, "2", b.Config.Resources.Workflows["my_second_workflow"].ID)
+    assert.Equal(t, []string{"my_first_job", "my_second_job"}, keys)
+    assert.Equal(t, "1", b.Config.Resources.Jobs["my_first_job"].ID)
+    assert.Equal(t, "2", b.Config.Resources.Jobs["my_second_job"].ID)
 }
@@ -1,4 +1,4 @@
 resources:
-  workflows:
-    this_workflow_isnt_defined:
+  jobs:
+    this_job_isnt_defined:
       id: 1
@@ -8,5 +8,5 @@ import (
 
 func TestIncludeOverride(t *testing.T) {
     b := load(t, "./include_override")
-    assert.Empty(t, b.Config.Resources.Workflows)
+    assert.Empty(t, b.Config.Resources.Jobs)
 }
@@ -29,7 +29,7 @@ environments:
           development: false
           photon: true
 
-      workflows:
+      jobs:
         pipeline_schedule:
           name: Daily refresh of production pipeline
@@ -8,8 +8,8 @@ import (
 )
 
 func TestJobAndPipelineDevelopment(t *testing.T) {
-    b := loadEnvironment(t, "./workflow_and_pipeline", "development")
-    assert.Len(t, b.Config.Resources.Workflows, 0)
+    b := loadEnvironment(t, "./job_and_pipeline", "development")
+    assert.Len(t, b.Config.Resources.Jobs, 0)
     assert.Len(t, b.Config.Resources.Pipelines, 1)
 
     p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
@@ -20,8 +20,8 @@ func TestJobAndPipelineDevelopment(t *testing.T) {
 }
 
 func TestJobAndPipelineStaging(t *testing.T) {
-    b := loadEnvironment(t, "./workflow_and_pipeline", "staging")
-    assert.Len(t, b.Config.Resources.Workflows, 0)
+    b := loadEnvironment(t, "./job_and_pipeline", "staging")
+    assert.Len(t, b.Config.Resources.Jobs, 0)
     assert.Len(t, b.Config.Resources.Pipelines, 1)
 
     p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
@@ -32,8 +32,8 @@ func TestJobAndPipelineStaging(t *testing.T) {
 }
 
 func TestJobAndPipelineProduction(t *testing.T) {
-    b := loadEnvironment(t, "./workflow_and_pipeline", "production")
-    assert.Len(t, b.Config.Resources.Workflows, 1)
+    b := loadEnvironment(t, "./job_and_pipeline", "production")
+    assert.Len(t, b.Config.Resources.Jobs, 1)
     assert.Len(t, b.Config.Resources.Pipelines, 1)
 
     p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
@@ -42,7 +42,7 @@ func TestJobAndPipelineProduction(t *testing.T) {
     assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
     assert.Equal(t, "nyc_taxi_production", p.Target)
 
-    j := b.Config.Resources.Workflows["pipeline_schedule"]
+    j := b.Config.Resources.Jobs["pipeline_schedule"]
     assert.Equal(t, "Daily refresh of production pipeline", j.Name)
     require.Len(t, j.Tasks, 1)
     assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId)
@@ -2,8 +2,8 @@ bundle:
   name: yaml_anchors
 
 resources:
-  workflows:
-    my_workflow:
+  jobs:
+    my_job:
       _: &common_cluster
         spark_version: "10.4.x-scala2.12"
       tasks:
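The `&common_cluster` anchor makes the cluster spec reusable; the fixture's task list (outside this hunk) would reference it with a YAML alias. A generic illustration with hypothetical task keys, not the fixture's actual tasks:

```yaml
resources:
  jobs:
    my_job:
      _: &common_cluster
        spark_version: "10.4.x-scala2.12"
      tasks:
        # Hypothetical tasks for illustration only.
        - task_key: example_task_1
          new_cluster: *common_cluster
        - task_key: example_task_2
          new_cluster: *common_cluster
```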
@@ -9,9 +9,9 @@ import (
 
 func TestYAMLAnchors(t *testing.T) {
     b := load(t, "./yaml_anchors")
-    assert.Len(t, b.Config.Resources.Workflows, 1)
+    assert.Len(t, b.Config.Resources.Jobs, 1)
 
-    j := b.Config.Resources.Workflows["my_workflow"]
+    j := b.Config.Resources.Jobs["my_job"]
     require.Len(t, j.Tasks, 2)
 
     t0 := j.Tasks[0]