Report all empty resources present in error diagnostic (#1685)

## Changes
This PR addresses post-merge feedback from
https://github.com/databricks/cli/pull/1673.

## Tests
Unit tests, and manual verification:
```
Error: experiment undefined-experiment is not defined
  at resources.experiments.undefined-experiment
  in databricks.yml:11:26

Error: job undefined-job is not defined
  at resources.jobs.undefined-job
  in databricks.yml:6:19

Error: pipeline undefined-pipeline is not defined
  at resources.pipelines.undefined-pipeline
  in databricks.yml:14:24

Name: undefined-job
Target: default

Found 3 errors
```
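For reference, the three lines of each error above line up with the fields of `diag.Diagnostic` that the validator now populates: `Summary`, `Paths`, and `Locations`. The sketch below is a rough, self-contained stand-in for that mapping, not the CLI's actual renderer; in particular, the dotted rendering of `dyn.Path` via `%s` is an assumption.

```go
package main

import (
	"fmt"
	"strings"

	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
)

// renderDiag is a rough stand-in for the CLI's renderer, written only to show
// how the fields of diag.Diagnostic line up with the output above; it is not
// the code path the CLI actually uses.
func renderDiag(d diag.Diagnostic) string {
	var sb strings.Builder
	// "Error:" corresponds to Severity == diag.Error.
	fmt.Fprintf(&sb, "Error: %s\n", d.Summary)
	for _, p := range d.Paths {
		// Assumes the path renders as a dotted string, e.g. "resources.jobs.undefined-job".
		fmt.Fprintf(&sb, "  at %s\n", p)
	}
	for _, l := range d.Locations {
		fmt.Fprintf(&sb, "  in %s:%d:%d\n", l.File, l.Line, l.Column)
	}
	return sb.String()
}

func main() {
	fmt.Print(renderDiag(diag.Diagnostic{
		Severity:  diag.Error,
		Summary:   "job undefined-job is not defined",
		Locations: []dyn.Location{{File: "databricks.yml", Line: 6, Column: 19}},
		Paths:     []dyn.Path{dyn.MustPathFromString("resources.jobs.undefined-job")},
	}))
}
```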

@@ -3,6 +3,7 @@ package validate
 import (
 	"context"
 	"fmt"
+	"slices"
 	"strings"
 
 	"github.com/databricks/cli/bundle"
@@ -21,27 +22,36 @@ func (m *allResourcesHaveValues) Name() string {
 }
 
 func (m *allResourcesHaveValues) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
-	rv := b.Config.Value().Get("resources")
-
-	// Skip if there are no resources block defined, or the resources block is empty.
-	if rv.Kind() == dyn.KindInvalid || rv.Kind() == dyn.KindNil {
-		return nil
-	}
+	diags := diag.Diagnostics{}
 
 	_, err := dyn.MapByPattern(
-		rv,
-		dyn.NewPattern(dyn.AnyKey(), dyn.AnyKey()),
+		b.Config.Value(),
+		dyn.NewPattern(dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()),
 		func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
-			if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
-				// Type of the resource, stripped of the trailing 's' to make it
-				// singular.
-				rType := strings.TrimSuffix(p[0].Key(), "s")
-
-				rName := p[1].Key()
-				return v, fmt.Errorf("%s %s is not defined", rType, rName)
+			if v.Kind() != dyn.KindNil {
+				return v, nil
 			}
+
+			// Type of the resource, stripped of the trailing 's' to make it
+			// singular.
+			rType := strings.TrimSuffix(p[1].Key(), "s")
+
+			// Name of the resource. Eg: "foo" in "jobs.foo".
+			rName := p[2].Key()
+
+			diags = append(diags, diag.Diagnostic{
+				Severity:  diag.Error,
+				Summary:   fmt.Sprintf("%s %s is not defined", rType, rName),
+				Locations: v.Locations(),
+				Paths:     []dyn.Path{slices.Clone(p)},
+			})
+
 			return v, nil
 		},
 	)
-	return diag.FromErr(err)
+	if err != nil {
+		diags = append(diags, diag.FromErr(err)...)
+	}
+
+	return diags
 }
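The key change above is matching the pattern `resources.*.*` and accumulating one diagnostic per empty resource, rather than returning on the first error. Below is a minimal sketch of that traversal over a hand-built value; `dyn.V` and `dyn.NilValue` are assumed to construct values the way the library's own tests do, and the dotted rendering of `p` is likewise an assumption (the real mutator operates on `b.Config.Value()`).

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// Hand-built stand-in for a parsed bundle configuration.
	root := dyn.V(map[string]dyn.Value{
		"resources": dyn.V(map[string]dyn.Value{
			"jobs": dyn.V(map[string]dyn.Value{
				"defined-job":   dyn.V(map[string]dyn.Value{"name": dyn.V("Test Job")}),
				"undefined-job": dyn.NilValue,
			}),
			"pipelines": dyn.V(map[string]dyn.Value{
				"undefined-pipeline": dyn.NilValue,
			}),
		}),
	})

	// "resources.*.*" visits every resource of every type, so every empty
	// entry is reported in a single pass instead of stopping at the first.
	_, err := dyn.MapByPattern(
		root,
		dyn.NewPattern(dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()),
		func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
			if v.Kind() == dyn.KindNil {
				// Assumes p prints as a dotted path, e.g. "resources.jobs.undefined-job".
				fmt.Printf("%s is not defined\n", p)
			}
			return v, nil
		},
	)
	if err != nil {
		fmt.Println("traversal error:", err)
	}
}
```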


@@ -1,7 +1,6 @@
 package config_tests
 
 import (
-	"path/filepath"
 	"testing"
 
 	"github.com/databricks/cli/bundle/config"
@@ -15,8 +14,6 @@ func TestJobAndPipelineDevelopmentWithEnvironment(t *testing.T) {
 	assert.Len(t, b.Config.Resources.Pipelines, 1)
 
 	p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
-	l := b.Config.GetLocation("resources.pipelines.nyc_taxi_pipeline")
-	assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(l.File))
 	assert.Equal(t, b.Config.Bundle.Mode, config.Development)
 	assert.True(t, p.Development)
 	require.Len(t, p.Libraries, 1)
@@ -30,8 +27,6 @@ func TestJobAndPipelineStagingWithEnvironment(t *testing.T) {
 	assert.Len(t, b.Config.Resources.Pipelines, 1)
 
 	p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
-	l := b.Config.GetLocation("resources.pipelines.nyc_taxi_pipeline")
-	assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(l.File))
 	assert.False(t, p.Development)
 	require.Len(t, p.Libraries, 1)
 	assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
@@ -44,16 +39,12 @@ func TestJobAndPipelineProductionWithEnvironment(t *testing.T) {
 	assert.Len(t, b.Config.Resources.Pipelines, 1)
 
 	p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
-	pl := b.Config.GetLocation("resources.pipelines.nyc_taxi_pipeline")
-	assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(pl.File))
 	assert.False(t, p.Development)
 	require.Len(t, p.Libraries, 1)
 	assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
 	assert.Equal(t, "nyc_taxi_production", p.Target)
 
 	j := b.Config.Resources.Jobs["pipeline_schedule"]
-	jl := b.Config.GetLocation("resources.jobs.pipeline_schedule")
-	assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(jl.File))
 	assert.Equal(t, "Daily refresh of production pipeline", j.Name)
 	require.Len(t, j.Tasks, 1)
 	assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId)


@@ -1,8 +0,0 @@
-bundle:
-  name: undefined-job
-
-resources:
-  jobs:
-    undefined:
-    test:
-      name: "Test Job"


@@ -1,22 +0,0 @@
-package config_tests
-
-import (
-	"context"
-	"testing"
-
-	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/config/validate"
-	"github.com/stretchr/testify/assert"
-)
-
-func TestUndefinedJobLoadsWithError(t *testing.T) {
-	b := load(t, "./undefined_job")
-	diags := bundle.Apply(context.Background(), b, validate.AllResourcesHaveValues())
-	assert.ErrorContains(t, diags.Error(), "job undefined is not defined")
-}
-
-func TestUndefinedPipelineLoadsWithError(t *testing.T) {
-	b := load(t, "./undefined_pipeline")
-	diags := bundle.Apply(context.Background(), b, validate.AllResourcesHaveValues())
-	assert.ErrorContains(t, diags.Error(), "pipeline undefined is not defined")
-}


@@ -1,8 +0,0 @@
-bundle:
-  name: undefined-pipeline
-
-resources:
-  pipelines:
-    undefined:
-    test:
-      name: "Test Pipeline"


@@ -0,0 +1,14 @@
+bundle:
+  name: undefined-job
+
+resources:
+  jobs:
+    undefined-job:
+    test:
+      name: "Test Job"
+
+  experiments:
+    undefined-experiment:
+
+  pipelines:
+    undefined-pipeline:


@@ -0,0 +1,50 @@
+package config_tests
+
+import (
+	"context"
+	"path/filepath"
+	"testing"
+
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/bundle/config/validate"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/dyn"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestUndefinedResourcesLoadWithError(t *testing.T) {
+	b := load(t, "./undefined_resources")
+	diags := bundle.Apply(context.Background(), b, validate.AllResourcesHaveValues())
+
+	assert.Len(t, diags, 3)
+	assert.Contains(t, diags, diag.Diagnostic{
+		Severity: diag.Error,
+		Summary:  "job undefined-job is not defined",
+		Locations: []dyn.Location{{
+			File:   filepath.FromSlash("undefined_resources/databricks.yml"),
+			Line:   6,
+			Column: 19,
+		}},
+		Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.undefined-job")},
+	})
+	assert.Contains(t, diags, diag.Diagnostic{
+		Severity: diag.Error,
+		Summary:  "experiment undefined-experiment is not defined",
+		Locations: []dyn.Location{{
+			File:   filepath.FromSlash("undefined_resources/databricks.yml"),
+			Line:   11,
+			Column: 26,
+		}},
+		Paths: []dyn.Path{dyn.MustPathFromString("resources.experiments.undefined-experiment")},
+	})
+	assert.Contains(t, diags, diag.Diagnostic{
+		Severity: diag.Error,
+		Summary:  "pipeline undefined-pipeline is not defined",
+		Locations: []dyn.Location{{
+			File:   filepath.FromSlash("undefined_resources/databricks.yml"),
+			Line:   14,
+			Column: 24,
+		}},
+		Paths: []dyn.Path{dyn.MustPathFromString("resources.pipelines.undefined-pipeline")},
+	})
+}