mirror of https://github.com/databricks/cli.git
Gather locations based on regexp patterns
parent 6bec3deac2
commit b55ce90ad0
@@ -12,13 +12,6 @@
        3
      ]
    ],
    "bundle.name": [
      [
        0,
        2,
        9
      ]
    ],
    "include": [
      [
        0,
@@ -76,6 +69,18 @@
        11
      ]
    ],
    "resources.jobs.my_job.tasks[0]": [
      [
        2,
        6,
        11
      ],
      [
        1,
        8,
        15
      ]
    ],
    "targets": [
      [
        0,

@@ -12,13 +12,6 @@
        3
      ]
    ],
    "bundle.name": [
      [
        0,
        2,
        9
      ]
    ],
    "include": [
      [
        0,
@@ -76,6 +69,18 @@
        11
      ]
    ],
    "resources.jobs.my_job.tasks[0]": [
      [
        2,
        6,
        11
      ],
      [
        1,
        18,
        15
      ]
    ],
    "targets": [
      [
        0,

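The keys in these JSON fragments are dyn.Path strings and, assuming the output mirrors the dynloc.Locations structure changed further down (a Files table plus a Locations map), each inner triple is [fileIndex, line, column], where fileIndex points into the file table. A minimal sketch, with a hypothetical file table, of turning one such triple back into a file position:

package main

import "fmt"

func main() {
    // Hypothetical file table; the real one lists the bundle's configuration files.
    files := []string{"databricks.yml", "resources.yml"}

    // One entry from "resources.jobs.my_job.tasks[0]" above: [fileIndex, line, column].
    loc := []int{1, 8, 15}

    fmt.Printf("%s:%d:%d\n", files[loc[0]], loc[1], loc[2]) // resources.yml:8:15
}
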
@@ -1,4 +1,4 @@

>>> $CLI bundle validate -t dev -o json --include-locations
>>> [CLI] bundle validate -t dev -o json --include-locations

>>> $CLI bundle validate -t prod -o json --include-locations
>>> [CLI] bundle validate -t prod -o json --include-locations

@@ -23,13 +23,8 @@ func (m *populateLocations) Name() string {
func (m *populateLocations) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
    locs, err := dynloc.Build(
        b.Config.Value(),

        // Make all paths relative to the bundle root.
        dynloc.WithBasePath(b.BundleRootPath),

        // Limit to maximum depth of 3.
        // The intent is to capture locations of all resources but not their configurations.
        dynloc.WithMaxDepth(3),
        b.BundleRootPath,
    )
    if err != nil {
        return diag.FromErr(err)

@@ -3,6 +3,7 @@ package dynloc
import (
    "fmt"
    "path/filepath"
    "regexp"
    "slices"
    "sort"

@@ -34,29 +35,39 @@ type Locations struct {
    // map with locations as compact as possible.
    fileToIndex map[string]int

    // maxDepth is the maximum depth of the [dyn.Path] keys in the [Locations] map.
    maxDepth int

    // basePath is the base path used to compute relative paths.
    basePath string
}

func (l *Locations) gatherLocations(v dyn.Value) (map[string][]dyn.Location, error) {
    locs := map[string][]dyn.Location{}
    patterns := []*regexp.Regexp{
        // Top level fields
        regexp.MustCompile(`^[^.]+$`),
        // Top level resources for all types (e.g. "resources.jobs" or "resources.jobs.my_job")
        regexp.MustCompile(`^resources\.[^.]+(\.[^.]+)?$`),
        // Job tasks (e.g. "resources.jobs.my_job.tasks[2]")
        regexp.MustCompile(`^resources\.[^.]+\.[^.]+\.tasks(\[\d+\])$`),
    }

    _, err := dyn.Walk(v, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
        // Skip the root value.
        if len(p) == 0 {
            return v, nil
        }

        // Skip if the path depth exceeds the maximum depth.
        if l.maxDepth > 0 && len(p) > l.maxDepth {
            return v, dyn.ErrSkip

        // Only gather locations for paths that match the patterns.
        pathStr := p.String()
        for _, re := range patterns {
            if re.MatchString(pathStr) {
                locs[pathStr] = v.Locations()
                break
            }
        }

        locs[p.String()] = v.Locations()
        return v, nil
    })

    return locs, err
}

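As a standalone illustration (not part of this commit), the following sketch exercises the same three patterns against hypothetical dyn.Path strings; only top-level fields, resource groups and individual resources, and indexed job tasks are kept, while deeper configuration paths are dropped:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // The same three patterns used by gatherLocations above.
    patterns := []*regexp.Regexp{
        regexp.MustCompile(`^[^.]+$`),
        regexp.MustCompile(`^resources\.[^.]+(\.[^.]+)?$`),
        regexp.MustCompile(`^resources\.[^.]+\.[^.]+\.tasks(\[\d+\])$`),
    }

    // Hypothetical path strings as produced by dyn.Path.String().
    paths := []string{
        "bundle",                         // kept: top-level field
        "bundle.name",                    // dropped: nested field below top level
        "resources.jobs",                 // kept: resource group
        "resources.jobs.my_job",          // kept: individual resource
        "resources.jobs.my_job.tasks[0]", // kept: job task by index
        "resources.jobs.my_job.tasks[0].notebook_task", // dropped: task configuration
    }

    for _, p := range paths {
        kept := false
        for _, re := range patterns {
            if re.MatchString(p) {
                kept = true
                break
            }
        }
        fmt.Printf("%-50s kept=%v\n", p, kept)
    }
}
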
@@ -128,22 +139,8 @@ func (l *Locations) addLocation(path, file string, line, col int) error {
// Option is a functional option for the [Build] function.
type Option func(l *Locations)

// WithMaxDepth sets the maximum depth of the [dyn.Path] keys in the [Locations] map.
func WithMaxDepth(depth int) Option {
    return func(l *Locations) {
        l.maxDepth = depth
    }
}

// WithBasePath sets the base path used to compute relative paths.
func WithBasePath(basePath string) Option {
    return func(l *Locations) {
        l.basePath = basePath
    }
}

// Build constructs a [Locations] object from a [dyn.Value].
func Build(v dyn.Value, opts ...Option) (Locations, error) {
func Build(v dyn.Value, basePath string) (Locations, error) {
    l := Locations{
        Version: Version,
        Files:   make([]string, 0),
@@ -151,11 +148,7 @@ func Build(v dyn.Value, opts ...Option) (Locations, error) {

        // Internal state.
        fileToIndex: make(map[string]int),
    }

    // Apply options.
    for _, opt := range opts {
        opt(&l)
        basePath: basePath,
    }

    // Traverse the value and collect locations.

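For reference, a minimal sketch of calling the reworked Build, assuming the package lives at github.com/databricks/cli/libs/dyn/dynloc and loading a single YAML file the way the removed test helper below did; the base path is now a required positional argument, and the depth limit is gone in favor of the regexp patterns above:

package main

import (
    "bytes"
    "fmt"
    "os"

    "github.com/databricks/cli/libs/dyn/dynloc"
    "github.com/databricks/cli/libs/dyn/yamlloader"
)

func main() {
    // Hypothetical input file; any bundle-style YAML works.
    path := "testdata/default/a.yml"
    buf, err := os.ReadFile(path)
    if err != nil {
        panic(err)
    }

    v, err := yamlloader.LoadYAML(path, bytes.NewBuffer(buf))
    if err != nil {
        panic(err)
    }

    // New signature: Build(v, basePath) replaces Build(v, WithBasePath(...), WithMaxDepth(...)).
    locs, err := dynloc.Build(v, "testdata/default")
    if err != nil {
        panic(err)
    }

    fmt.Println(locs.Files)     // e.g. [a.yml], relative to the base path
    fmt.Println(locs.Locations) // path -> [[fileIndex, line, column], ...]
}
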
@@ -1,100 +0,0 @@
package dynloc

import (
    "bytes"
    "os"
    "path/filepath"
    "testing"

    "github.com/databricks/cli/libs/dyn"
    assert "github.com/databricks/cli/libs/dyn/dynassert"
    "github.com/databricks/cli/libs/dyn/merge"
    "github.com/databricks/cli/libs/dyn/yamlloader"
    "github.com/stretchr/testify/require"
)

func load(t *testing.T, path string) dyn.Value {
    matches, err := filepath.Glob(path + "/*.yml")
    require.NoError(t, err)
    require.NotEmpty(t, matches)

    // Load all files.
    vout := dyn.NilValue
    for _, match := range matches {
        buf, err := os.ReadFile(match)
        require.NoError(t, err)

        v, err := yamlloader.LoadYAML(match, bytes.NewBuffer(buf))
        require.NoError(t, err)

        vout, err = merge.Merge(vout, v)
        require.NoError(t, err)
    }

    return vout
}

func TestLocations_Default(t *testing.T) {
    v := load(t, "testdata/default")
    locs, err := Build(v)
    require.NoError(t, err)
    assert.Equal(t, 1, locs.Version)
    assert.Equal(t, []string{"testdata/default/a.yml", "testdata/default/b.yml"}, locs.Files)
    assert.Equal(t, map[string][][]int{
        "a":   {{0, 2, 3}},
        "a.b": {{0, 2, 6}},
        "b":   {{1, 2, 3}},
        "b.c": {{1, 2, 6}},
    }, locs.Locations)
}

func TestLocations_DefaultWithBasePath(t *testing.T) {
    v := load(t, "testdata/default")
    locs, err := Build(v, WithBasePath("testdata/default"))
    require.NoError(t, err)
    assert.Equal(t, 1, locs.Version)
    assert.Equal(t, []string{"a.yml", "b.yml"}, locs.Files)
    assert.Equal(t, map[string][][]int{
        "a":   {{0, 2, 3}},
        "a.b": {{0, 2, 6}},
        "b":   {{1, 2, 3}},
        "b.c": {{1, 2, 6}},
    }, locs.Locations)
}

func TestLocations_Override(t *testing.T) {
    v := load(t, "testdata/override")
    locs, err := Build(v)
    require.NoError(t, err)
    assert.Equal(t, 1, locs.Version)
    assert.Equal(t, []string{"testdata/override/a.yml", "testdata/override/b.yml"}, locs.Files)

    // Note: specific ordering of locations is described in [merge.Merge].
    assert.Equal(t, map[string][][]int{
        "a": {
            {0, 2, 3},
            {1, 2, 3},
        },
        "a.b": {
            {1, 2, 6},
            {0, 2, 6},
        },
    }, locs.Locations)
}

func TestLocations_MaxDepth(t *testing.T) {
    v := load(t, "testdata/depth")

    var locs Locations
    var err error

    // Test with no max depth.
    locs, err = Build(v)
    require.NoError(t, err)
    assert.Len(t, locs.Locations, 5)

    // Test with max depth and see that the number of locations is reduced.
    locs, err = Build(v, WithMaxDepth(3))
    require.NoError(t, err)
    assert.Len(t, locs.Locations, 3)
}

@@ -1,2 +0,0 @@
a:
  b: 42

@@ -1,2 +0,0 @@
b:
  c: 43

@@ -1,5 +0,0 @@
a:
  b:
    c:
      d:
        e: 42

@@ -1,2 +0,0 @@
a:
  b: 42

@@ -1,2 +0,0 @@
a:
  b: 43