package mutator

import (
	"fmt"
	"slices"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/dyn"
)

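// jobTaskRewritePattern pairs a [dyn.Pattern] that selects a value in the job
// configuration with the rewrite function to apply to values matching it.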
type jobTaskRewritePattern struct {
	pattern dyn.Pattern
	fn      rewriteFunc
}

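// rewritePatterns returns the task-level paths to translate under the given
// base pattern: notebook paths, Python files, dbt project directories,
// SQL file paths, and wheel/jar library references.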
func rewritePatterns(base dyn.Pattern) []jobTaskRewritePattern {
	return []jobTaskRewritePattern{
		{
			base.Append(dyn.Key("notebook_task"), dyn.Key("notebook_path")),
			translateNotebookPath,
		},
		{
			base.Append(dyn.Key("spark_python_task"), dyn.Key("python_file")),
			translateFilePath,
		},
		{
			base.Append(dyn.Key("dbt_task"), dyn.Key("project_directory")),
			translateDirectoryPath,
		},
		{
			base.Append(dyn.Key("sql_task"), dyn.Key("file"), dyn.Key("path")),
			translateFilePath,
		},
		{
			base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("whl")),
			translateNoOp,
		},
		{
			base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("jar")),
			translateNoOp,
		},
	}
}

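// applyJobTranslations rewrites every matching task path in every job so that
// it is relative to the bundle root. Jobs that use a Git source are skipped,
// and the directory of the job's own configuration file serves as a fallback
// base directory for backwards compatibility.
//
// For illustration (a hypothetical bundle snippet, not from this repository):
//
//	resources:
//	  jobs:
//	    my_job:
//	      tasks:
//	        - notebook_task:
//	            notebook_path: ./notebook.py
//
// The notebook_path above is rewritten relative to the directory of the
// configuration file that defines it.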
func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) {
	var fallback = make(map[string]string)
	var ignore []string
	var err error

	for key, job := range b.Config.Resources.Jobs {
		dir, err := job.ConfigFileDirectory()
		if err != nil {
			return dyn.InvalidValue, fmt.Errorf("unable to determine directory for job %s: %w", key, err)
		}

		// If we cannot resolve the relative path using the [dyn.Value] location itself,
		// use the job's location as fallback. This is necessary for backwards compatibility.
		fallback[key] = dir

		// Do not translate job task paths if the job uses a Git source.
		if job.GitSource != nil {
			ignore = append(ignore, key)
		}
	}

	// Base pattern to match all tasks in all jobs,
	// i.e. resources.jobs.<job_key>.tasks[<index>].
	base := dyn.NewPattern(
		dyn.Key("resources"),
		dyn.Key("jobs"),
		dyn.AnyKey(),
		dyn.Key("tasks"),
		dyn.AnyIndex(),
	)

	// Compile the list of patterns and their respective rewrite functions.
	taskPatterns := rewritePatterns(base)
	forEachPatterns := rewritePatterns(base.Append(dyn.Key("for_each_task"), dyn.Key("task")))
	allPatterns := append(taskPatterns, forEachPatterns...)

	for _, t := range allPatterns {
		v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
			// The path component at index 2 is the job's resource key.
			key := p[2].Key()

			// Skip path translation if the job is using a Git source.
			if slices.Contains(ignore, key) {
				return v, nil
			}

			dir, err := v.Location().Directory()
			if err != nil {
				return dyn.InvalidValue, fmt.Errorf("unable to determine directory for job %s: %w", key, err)
			}

			return m.rewriteRelativeTo(b, p, v, t.fn, dir, fallback[key])
		})
		if err != nil {
			return dyn.InvalidValue, err
		}
	}

	return v, nil
}