Compare commits

...

3 Commits

Author            SHA1        Message                             Date
Pieter Noordhuis  4f31f3a575  Update import                       2025-01-28 12:47:38 +01:00
Pieter Noordhuis  8f2c3309e6  Refactor to ensure stable ordering  2025-01-28 12:42:13 +01:00
Pieter Noordhuis  75f1efcb91  Move configuration file             2025-01-27 17:32:11 +01:00
6 changed files with 104 additions and 43 deletions

View File

@@ -2,7 +2,7 @@ bundle:
   name: include_locations

 include:
-  - job.yml
+  - resources/*.yml
   - override.yml

 targets:
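This include change lines up with the "Move configuration file" commit: the job definition now lives under resources/ (it shows up as resources/job.yml in the output fixtures below), so the include list switches from the single job.yml entry to a resources/*.yml glob.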

View File

@@ -1,8 +1,8 @@
 {
   "files": [
     "databricks.yml",
-    "job.yml",
-    "override.yml"
+    "override.yml",
+    "resources/job.yml"
   ],
   "locations": {
     "bundle": [
@@ -42,36 +42,36 @@
     ],
     "resources": [
       [
-        1,
+        2,
         2,
         3
       ],
       [
-        2,
+        1,
         4,
         7
       ]
     ],
     "resources.jobs": [
       [
-        1,
+        2,
         3,
         5
       ],
       [
-        2,
+        1,
         5,
         9
       ]
     ],
     "resources.jobs.my_job": [
       [
-        1,
+        2,
         4,
         7
       ],
       [
-        2,
+        1,
         6,
         11
       ]
@@ -83,7 +83,7 @@
         3
       ],
       [
-        2,
+        1,
         2,
         3
       ]
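Each entry under "locations" is a [file index, line, column] triple whose first element indexes into the "files" list. With the files now emitted in sorted order, override.yml moves from index 2 to 1 and the job file (now resources/job.yml) from 1 to 2, which is exactly the swap visible in the numbers above. A minimal sketch of resolving one such triple, using values taken from this fixture (the decoding helper is illustrative, not part of the CLI):

package main

import "fmt"

func main() {
	// "files" after the change, in alphabetical order.
	files := []string{"databricks.yml", "override.yml", "resources/job.yml"}

	// First location of resources.jobs.my_job in the new output: [2, 4, 7],
	// i.e. file index 2, line 4, column 7.
	loc := []int{2, 4, 7}

	fmt.Printf("%s:%d:%d\n", files[loc[0]], loc[1], loc[2])
	// Output: resources/job.yml:4:7
}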

View File

@@ -1,8 +1,8 @@
 {
   "files": [
     "databricks.yml",
-    "job.yml",
-    "override.yml"
+    "override.yml",
+    "resources/job.yml"
   ],
   "locations": {
     "bundle": [
@@ -42,36 +42,36 @@
     ],
     "resources": [
       [
-        1,
+        2,
         2,
         3
       ],
       [
-        2,
+        1,
         14,
         7
       ]
     ],
     "resources.jobs": [
       [
-        1,
+        2,
         3,
         5
       ],
       [
-        2,
+        1,
         15,
         9
       ]
     ],
     "resources.jobs.my_job": [
       [
-        1,
+        2,
         4,
         7
       ],
       [
-        2,
+        1,
         16,
         11
       ]
@@ -83,7 +83,7 @@
         3
       ],
       [
-        2,
+        1,
         2,
         3
       ]

View File

@@ -1,9 +1,13 @@
 package dynloc

 import (
+	"fmt"
 	"path/filepath"
+	"slices"
+	"sort"

 	"github.com/databricks/cli/libs/dyn"
+	"golang.org/x/exp/maps"
 )

 const (
@@ -37,28 +41,84 @@ type Locations struct {
 	basePath string
 }

-func (l *Locations) addLocation(p dyn.Path, file string, line, col int) error {
+func (l *Locations) gatherLocations(v dyn.Value) (map[string][]dyn.Location, error) {
+	locs := map[string][]dyn.Location{}
+	_, err := dyn.Walk(v, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+		// Skip the root value.
+		if len(p) == 0 {
+			return v, nil
+		}
+
+		// Skip if the path depth exceeds the maximum depth.
+		if l.maxDepth > 0 && len(p) > l.maxDepth {
+			return v, dyn.ErrSkip
+		}
+
+		locs[p.String()] = v.Locations()
+		return v, nil
+	})
+	return locs, err
+}
+
+func (l *Locations) normalizeFilePath(file string) (string, error) {
 	var err error

 	// Compute the relative path. The base path may be empty.
 	file, err = filepath.Rel(l.basePath, file)
 	if err != nil {
-		return err
+		return "", err
 	}

 	// Convert the path separator to forward slashes.
 	// This makes it possible to compare output across platforms.
-	file = filepath.ToSlash(file)
+	return filepath.ToSlash(file), nil
+}

-	// If the file is not yet in the list, add it.
+func (l *Locations) registerFileNames(locs []dyn.Location) error {
+	cache := map[string]string{}
+	for _, loc := range locs {
+		// Never process the same file path twice.
+		if _, ok := cache[loc.File]; ok {
+			continue
+		}
+
+		// Normalize the file path.
+		out, err := l.normalizeFilePath(loc.File)
+		if err != nil {
+			return err
+		}
+
+		// Cache the normalized path.
+		cache[loc.File] = out
+	}
+
+	l.Files = maps.Values(cache)
+	sort.Strings(l.Files)
+
+	// Build the file-to-index map.
+	for i, file := range l.Files {
+		l.fileToIndex[file] = i
+	}
+
+	// Add entries for the original file path.
+	// Doing this means we can perform the lookup with the verbatim file path.
+	for k, v := range cache {
+		l.fileToIndex[k] = l.fileToIndex[v]
+	}
+
+	return nil
+}
+
+func (l *Locations) addLocation(path, file string, line, col int) error {
+	// Expect the file to be present in the lookup map.
 	if _, ok := l.fileToIndex[file]; !ok {
-		l.fileToIndex[file] = len(l.Files)
-		l.Files = append(l.Files, file)
+		// This indicates a logic problem below, but we rather not panic.
+		return fmt.Errorf("dynloc: unknown file %q", file)
 	}

 	// Add the location to the map.
-	l.Locations[p.String()] = append(
-		l.Locations[p.String()],
+	l.Locations[path] = append(
+		l.Locations[path],
 		[]int{l.fileToIndex[file], line, col},
 	)

@@ -99,26 +159,27 @@ func Build(v dyn.Value, opts ...Option) (Locations, error) {
 	}

 	// Traverse the value and collect locations.
-	_, err := dyn.Walk(v, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
-		// Skip the root value.
-		if len(p) == 0 {
-			return v, nil
-		}
+	pathToLocations, err := l.gatherLocations(v)
+	if err != nil {
+		return l, err
+	}

-		// Skip if the path depth exceeds the maximum depth.
-		if l.maxDepth > 0 && len(p) > l.maxDepth {
-			return v, dyn.ErrSkip
-		}
+	// Normalize file paths and add locations.
+	// This step adds files to the [Files] array in alphabetical order.
+	err = l.registerFileNames(slices.Concat(maps.Values(pathToLocations)...))
+	if err != nil {
+		return l, err
+	}

-		for _, loc := range v.Locations() {
-			err := l.addLocation(p, loc.File, loc.Line, loc.Column)
+	// Add locations to the map.
+	for path, locs := range pathToLocations {
+		for _, loc := range locs {
+			err = l.addLocation(path, loc.File, loc.Line, loc.Column)
 			if err != nil {
-				return dyn.InvalidValue, err
+				return l, err
 			}
 		}
 	}
-
-		return v, nil
-	})

 	return l, err
 }
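The refactor splits the old addLocation into three steps: gather the locations for every path, register the normalized file names up front (deduplicated and sorted), and only then emit the [file index, line, column] triples. Registering and sorting the file names before any triple is written is what makes the indices, and therefore the JSON fixtures above, stable regardless of traversal order. A standalone sketch of that ordering step, assuming plain file-name strings rather than dyn.Location values:

package main

import (
	"fmt"
	"sort"
)

// buildFileIndex mirrors the two-phase idea: deduplicate the file names,
// sort them, then assign indices based on the sorted order.
func buildFileIndex(files []string) ([]string, map[string]int) {
	seen := map[string]bool{}
	var names []string
	for _, f := range files {
		if !seen[f] {
			seen[f] = true
			names = append(names, f)
		}
	}
	sort.Strings(names)

	index := make(map[string]int, len(names))
	for i, f := range names {
		index[f] = i
	}
	return names, index
}

func main() {
	// Encounter order no longer matters; the indices come out the same.
	files, index := buildFileIndex([]string{
		"resources/job.yml", "databricks.yml", "override.yml", "databricks.yml",
	})
	fmt.Println(files)                      // [databricks.yml override.yml resources/job.yml]
	fmt.Println(index["resources/job.yml"]) // 2
}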

View File

@@ -7,9 +7,9 @@ import (
 	"testing"

 	"github.com/databricks/cli/libs/dyn"
+	assert "github.com/databricks/cli/libs/dyn/dynassert"
 	"github.com/databricks/cli/libs/dyn/merge"
 	"github.com/databricks/cli/libs/dyn/yamlloader"
-	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
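The test itself is unchanged apart from the import block: testify's assert package is swapped for the repository's dynassert package, aliased to assert so the existing assert.* call sites keep compiling as-is; this is presumably the "Update import" commit above.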