Add flag to validate and summary commands to include location information

The (hidden) `--include-locations` flag instructs commands to include location
information for the configuration tree in the JSON output (if selected). Paths
to configuration files are relative to the bundle root directory.
This commit is contained in:
Pieter Noordhuis 2025-01-17 17:21:26 +01:00
parent 7034793d1d
commit cc9a05663a
No known key found for this signature in database
GPG Key ID: 12ACCCC104CF2930
13 changed files with 355 additions and 5 deletions

View File

@ -0,0 +1,10 @@
bundle:
name: include_locations
include:
- job.yml
- override.yml
targets:
dev: {}
prod: {}

View File

@ -0,0 +1,8 @@
resources:
jobs:
my_job:
name: name
tasks:
- task_key: default
notebook_task:
notebook_path: ./src/notebook.py

View File

@ -0,0 +1,100 @@
{
"bundle": [
{
"column": 3,
"file": "databricks.yml",
"line": 2
}
],
"bundle.name": [
{
"column": 9,
"file": "databricks.yml",
"line": 2
}
],
"include": [
{
"column": 3,
"file": "databricks.yml",
"line": 5
}
],
"include[0]": [
{
"column": 5,
"file": "databricks.yml",
"line": 5
}
],
"include[1]": [
{
"column": 5,
"file": "databricks.yml",
"line": 6
}
],
"resources": [
{
"column": 3,
"file": "job.yml",
"line": 2
}
],
"resources.jobs": [
{
"column": 5,
"file": "job.yml",
"line": 3
}
],
"resources.jobs.my_job": [
{
"column": 7,
"file": "job.yml",
"line": 4
}
],
"resources.jobs.my_job.name": [
{
"column": 17,
"file": "override.yml",
"line": 6
}
],
"resources.jobs.my_job.tasks": [
{
"column": 9,
"file": "job.yml",
"line": 6
}
],
"resources.jobs.my_job.tasks[0]": [
{
"column": 11,
"file": "job.yml",
"line": 6
}
],
"resources.jobs.my_job.tasks[0].notebook_task": [
{
"column": 13,
"file": "job.yml",
"line": 8
}
],
"resources.jobs.my_job.tasks[0].notebook_task.notebook_path": [
{
"column": 32,
"file": "override.yml",
"line": 10
}
],
"targets": [
{
"column": 3,
"file": "databricks.yml",
"line": 9
}
]
}

View File

@ -0,0 +1,100 @@
{
"bundle": [
{
"column": 3,
"file": "databricks.yml",
"line": 2
}
],
"bundle.name": [
{
"column": 9,
"file": "databricks.yml",
"line": 2
}
],
"include": [
{
"column": 3,
"file": "databricks.yml",
"line": 5
}
],
"include[0]": [
{
"column": 5,
"file": "databricks.yml",
"line": 5
}
],
"include[1]": [
{
"column": 5,
"file": "databricks.yml",
"line": 6
}
],
"resources": [
{
"column": 3,
"file": "job.yml",
"line": 2
}
],
"resources.jobs": [
{
"column": 5,
"file": "job.yml",
"line": 3
}
],
"resources.jobs.my_job": [
{
"column": 7,
"file": "job.yml",
"line": 4
}
],
"resources.jobs.my_job.name": [
{
"column": 17,
"file": "override.yml",
"line": 16
}
],
"resources.jobs.my_job.tasks": [
{
"column": 9,
"file": "job.yml",
"line": 6
}
],
"resources.jobs.my_job.tasks[0]": [
{
"column": 11,
"file": "job.yml",
"line": 6
}
],
"resources.jobs.my_job.tasks[0].notebook_task": [
{
"column": 13,
"file": "job.yml",
"line": 8
}
],
"resources.jobs.my_job.tasks[0].notebook_task.notebook_path": [
{
"column": 32,
"file": "override.yml",
"line": 20
}
],
"targets": [
{
"column": 3,
"file": "databricks.yml",
"line": 9
}
]
}

View File

@ -0,0 +1,4 @@
>>> $CLI bundle validate -t dev -o json --include-locations
>>> $CLI bundle validate -t prod -o json --include-locations

View File

@ -0,0 +1,20 @@
targets:
dev:
resources:
jobs:
my_job:
name: dev name
tasks:
- task_key: default
notebook_task:
notebook_path: ./src/dev.py
prod:
resources:
jobs:
my_job:
name: prod name
tasks:
- task_key: default
notebook_task:
notebook_path: ./src/prod.py

View File

@ -0,0 +1,2 @@
# Validate each target with --include-locations and extract only the location
# map (the "__locations" key) from the JSON output. File paths in the result
# are relative to the bundle root, so the captured output is stable.
trace $CLI bundle validate -t dev -o json --include-locations | jq .__locations > output.dev.json
trace $CLI bundle validate -t prod -o json --include-locations | jq .__locations > output.prod.json

View File

@ -0,0 +1,73 @@
package mutator
import (
"context"
"path/filepath"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
)
// populateLocations records the source location of every value in the
// configuration tree into [config.Root.Locations].
type populateLocations struct{}

// PopulateLocations returns a mutator that collects location information for
// the entire configuration tree and stores it in [config.Root.Locations].
func PopulateLocations() bundle.Mutator {
	return new(populateLocations)
}
// Name identifies this mutator in logs and diagnostics.
func (m *populateLocations) Name() string {
	const name = "PopulateLocations"
	return name
}
// computeRelativeLocations returns the locations of v with their file paths
// rewritten relative to base, using forward slashes as the path separator so
// that output can be compared across platforms. It returns nil when v carries
// no location information (e.g. defaults or values set by the program itself),
// or when any location's path cannot be expressed relative to base.
func computeRelativeLocations(base string, v dyn.Value) []dyn.Location {
	out := v.Locations()
	if len(out) == 0 {
		return nil
	}

	for i, l := range out {
		rel, err := filepath.Rel(base, l.File)
		if err != nil {
			return nil
		}

		// Normalize separators to forward slashes for platform-independent output.
		out[i].File = filepath.ToSlash(rel)
	}

	return out
}
// Apply walks the configuration tree and records, for every path with known
// location information, its source location (relative to the bundle root) in
// [config.Root.Locations].
func (m *populateLocations) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	out := map[string][]dyn.Location{}
	_, err := dyn.Walk(b.Config.Value(), func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
		// The root value itself is not recorded.
		if len(p) > 0 {
			// Values without locations (defaults, programmatically set values)
			// yield an empty slice and are skipped.
			if locs := computeRelativeLocations(b.BundleRootPath, v); len(locs) > 0 {
				// A value may carry multiple locations (see [merge.Merge] for the
				// semantics). We externalize only the first, but keep a slice so
				// additional entries can be included later without changing the schema.
				out[p.String()] = locs[:1]
			}
		}
		return v, nil
	})
	if err != nil {
		return diag.FromErr(err)
	}

	b.Config.Locations = out
	return nil
}

View File

@ -69,6 +69,10 @@ type Root struct {
// Permissions section allows to define permissions which will be
// applied to all resources defined in bundle
Permissions []resources.Permission `json:"permissions,omitempty"`
// Locations is an output-only field that holds configuration location
// information for every path in the configuration tree.
Locations map[string][]dyn.Location `json:"__locations,omitempty" bundle:"internal"`
}
// Load loads the bundle configuration file at the specified path.

View File

@ -26,7 +26,10 @@ func newSummaryCommand() *cobra.Command {
}
var forcePull bool
var includeLocations bool
cmd.Flags().BoolVar(&forcePull, "force-pull", false, "Skip local cache and load the state from the remote workspace")
cmd.Flags().BoolVar(&includeLocations, "include-locations", false, "Include location information in the output")
cmd.Flags().MarkHidden("include-locations")
cmd.RunE = func(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
@ -59,8 +62,16 @@ func newSummaryCommand() *cobra.Command {
}
}
diags = bundle.Apply(ctx, b,
bundle.Seq(terraform.Load(), mutator.InitializeURLs()))
diags = bundle.Apply(ctx, b, bundle.Seq(
terraform.Load(),
mutator.InitializeURLs(),
))
// Include location information in the output if the flag is set.
if includeLocations {
diags = diags.Extend(bundle.Apply(ctx, b, mutator.PopulateLocations()))
}
if err := diags.Error(); err != nil {
return err
}

View File

@ -6,6 +6,7 @@ import (
"fmt"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/validate"
"github.com/databricks/cli/bundle/phases"
"github.com/databricks/cli/bundle/render"
@ -31,6 +32,10 @@ func newValidateCommand() *cobra.Command {
Args: root.NoArgs,
}
var includeLocations bool
cmd.Flags().BoolVar(&includeLocations, "include-locations", false, "Include location information in the output")
cmd.Flags().MarkHidden("include-locations")
cmd.RunE = func(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
b, diags := utils.ConfigureBundleWithVariables(cmd)
@ -51,6 +56,11 @@ func newValidateCommand() *cobra.Command {
diags = diags.Extend(bundle.Apply(ctx, b, validate.Validate()))
}
// Include location information in the output if the flag is set.
if includeLocations {
diags = diags.Extend(bundle.Apply(ctx, b, mutator.PopulateLocations()))
}
switch root.OutputType(cmd) {
case flags.OutputText:
renderOpts := render.RenderOptions{RenderSummaryTable: true}

View File

@ -7,9 +7,9 @@ import (
)
type Location struct {
File string
Line int
Column int
File string `json:"file"`
Line int `json:"line"`
Column int `json:"column"`
}
func (l Location) String() string {

View File

@ -1,6 +1,7 @@
package dyn_test
import (
"encoding/json"
"testing"
"github.com/databricks/cli/libs/dyn"
@ -24,3 +25,10 @@ func TestLocationDirectoryNoFile(t *testing.T) {
_, err := loc.Directory()
assert.Error(t, err)
}
// TestLocationMarshal verifies that a Location marshals to JSON with
// lowercase field names in declaration order (file, line, column).
func TestLocationMarshal(t *testing.T) {
	l := dyn.Location{File: "file", Line: 1, Column: 2}
	out, err := json.Marshal(l)
	assert.NoError(t, err)
	assert.Equal(t, `{"file":"file","line":1,"column":2}`, string(out))
}