Stop tracking file path locations in bundle resources (#1673)

## Changes
Since locations are already tracked in the dynamic value tree, we no
longer need to track them at the resource/artifact level. This PR:
1. Removes the use of `paths.Paths` in favor of `dyn.Location` (see the sketch below).
2. Makes the validation that resources are not empty-valued generic
across all resource types.
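
For concreteness, a minimal sketch of the lookup this PR standardizes on (`my_job`/`name` is a hypothetical resource key; `dyn.Location` records where a configuration node was defined):

```go
package example

import "github.com/databricks/cli/bundle"

// jobConfigFile returns the path of the config file that defines the given
// job. Before this PR, callers read job.ConfigFilePath off the embedded
// paths.Paths struct; now the location is read on demand from the dynamic
// value tree.
func jobConfigFile(b *bundle.Bundle, name string) string {
	return b.Config.GetLocation("resources.jobs." + name).File
}
```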
  
## Tests
Existing unit tests.
shreyas-goenka 2024-08-13 18:20:15 +05:30 committed by GitHub
parent ad8e61c739
commit 7ae80de351
26 changed files with 98 additions and 243 deletions

View File

@ -34,11 +34,13 @@ func (m *prepare) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics
return diag.Errorf("artifact doesn't exist: %s", m.name)
}
l := b.Config.GetLocation("artifacts." + m.name)
dirPath := filepath.Dir(l.File)
// Check if source paths are absolute; if not, make them absolute
for k := range artifact.Files {
f := &artifact.Files[k]
if !filepath.IsAbs(f.Source) {
dirPath := filepath.Dir(artifact.ConfigFilePath)
f.Source = filepath.Join(dirPath, f.Source)
}
}
@ -49,7 +51,6 @@ func (m *prepare) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics
}
if !filepath.IsAbs(artifact.Path) {
dirPath := filepath.Dir(artifact.ConfigFilePath)
artifact.Path = filepath.Join(dirPath, artifact.Path)
}
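
The same pattern, extracted as a hypothetical standalone helper (not part of this PR) to show how the prepare mutator resolves an artifact's relative source paths against the directory of its defining config file:

```go
package example

import (
	"path/filepath"

	"github.com/databricks/cli/bundle"
)

// resolveSources makes every relative artifact source path absolute by
// joining it with the directory of the config file that defined the
// artifact, as reported by the dynamic value tree.
func resolveSources(b *bundle.Bundle, name string) {
	artifact := b.Config.Artifacts[name]
	l := b.Config.GetLocation("artifacts." + name)
	dirPath := filepath.Dir(l.File)
	for k := range artifact.Files {
		f := &artifact.Files[k]
		if !filepath.IsAbs(f.Source) {
			f.Source = filepath.Join(dirPath, f.Source)
		}
	}
}
```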

View File

@ -4,18 +4,11 @@ import (
"context"
"fmt"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/libs/exec"
)
type Artifacts map[string]*Artifact
func (artifacts Artifacts) ConfigureConfigFilePath() {
for _, artifact := range artifacts {
artifact.ConfigureConfigFilePath()
}
}
type ArtifactType string
const ArtifactPythonWheel ArtifactType = `whl`
@ -40,8 +33,6 @@ type Artifact struct {
BuildCommand string `json:"build,omitempty"`
Executable exec.ExecutableType `json:"executable,omitempty"`
paths.Paths
}
func (a *Artifact) Build(ctx context.Context) ([]byte, error) {

View File

@ -9,7 +9,6 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/require"
@ -65,9 +64,6 @@ func TestGenerateTrampoline(t *testing.T) {
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"test": {
Paths: paths.Paths{
ConfigFilePath: tmpDir,
},
JobSettings: &jobs.JobSettings{
Tasks: tasks,
},

View File

@ -1,22 +0,0 @@
package paths
import (
"github.com/databricks/cli/libs/dyn"
)
type Paths struct {
// Absolute path on the local file system to the configuration file that holds
// the definition of this resource.
ConfigFilePath string `json:"-" bundle:"readonly"`
// DynamicValue stores the [dyn.Value] of the containing struct.
// This assumes that this struct is always embedded.
DynamicValue dyn.Value `json:"-"`
}
func (p *Paths) ConfigureConfigFilePath() {
if !p.DynamicValue.IsValid() {
panic("DynamicValue not set")
}
p.ConfigFilePath = p.DynamicValue.Location().File
}
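
For reference, the type that replaces the deleted struct is `dyn.Location` from `libs/dyn`; its shape is reproduced here as a sketch (fields as of this PR):

```go
// Location pinpoints where a configuration node was defined. It is what
// Root.GetLocation returns, and it supersedes the per-resource
// ConfigFilePath bookkeeping above.
type Location struct {
	File   string `json:"file"`
	Line   int    `json:"line"`
	Column int    `json:"column"`
}
```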

View File

@ -21,81 +21,14 @@ type Resources struct {
Schemas map[string]*resources.Schema `json:"schemas,omitempty"`
}
type resource struct {
resource ConfigResource
resource_type string
key string
}
func (r *Resources) allResources() []resource {
all := make([]resource, 0)
for k, e := range r.Jobs {
all = append(all, resource{resource_type: "job", resource: e, key: k})
}
for k, e := range r.Pipelines {
all = append(all, resource{resource_type: "pipeline", resource: e, key: k})
}
for k, e := range r.Models {
all = append(all, resource{resource_type: "model", resource: e, key: k})
}
for k, e := range r.Experiments {
all = append(all, resource{resource_type: "experiment", resource: e, key: k})
}
for k, e := range r.ModelServingEndpoints {
all = append(all, resource{resource_type: "serving endpoint", resource: e, key: k})
}
for k, e := range r.RegisteredModels {
all = append(all, resource{resource_type: "registered model", resource: e, key: k})
}
for k, e := range r.QualityMonitors {
all = append(all, resource{resource_type: "quality monitor", resource: e, key: k})
}
return all
}
func (r *Resources) VerifyAllResourcesDefined() error {
all := r.allResources()
for _, e := range all {
err := e.resource.Validate()
if err != nil {
return fmt.Errorf("%s %s is not defined", e.resource_type, e.key)
}
}
return nil
}
// ConfigureConfigFilePath sets the specified path for all resources contained in this instance.
// This property is used to correctly resolve paths relative to the path
// of the configuration file they were defined in.
func (r *Resources) ConfigureConfigFilePath() {
for _, e := range r.Jobs {
e.ConfigureConfigFilePath()
}
for _, e := range r.Pipelines {
e.ConfigureConfigFilePath()
}
for _, e := range r.Models {
e.ConfigureConfigFilePath()
}
for _, e := range r.Experiments {
e.ConfigureConfigFilePath()
}
for _, e := range r.ModelServingEndpoints {
e.ConfigureConfigFilePath()
}
for _, e := range r.RegisteredModels {
e.ConfigureConfigFilePath()
}
for _, e := range r.QualityMonitors {
e.ConfigureConfigFilePath()
}
}
type ConfigResource interface {
// Function to assert if the resource exists in the workspace configured in
// the input workspace client.
Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error)
// Terraform equivalent name of the resource. For example "databricks_job"
// for jobs and "databricks_pipeline" for pipelines.
TerraformResourceName() string
Validate() error
}
func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) {

View File

@ -2,10 +2,8 @@ package resources
import (
"context"
"fmt"
"strconv"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
@ -17,8 +15,6 @@ type Job struct {
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
paths.Paths
*jobs.JobSettings
}
@ -48,11 +44,3 @@ func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id stri
func (j *Job) TerraformResourceName() string {
return "databricks_job"
}
func (j *Job) Validate() error {
if j == nil || !j.DynamicValue.IsValid() || j.JobSettings == nil {
return fmt.Errorf("job is not defined")
}
return nil
}

View File

@ -2,9 +2,7 @@ package resources
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
@ -16,8 +14,6 @@ type MlflowExperiment struct {
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
paths.Paths
*ml.Experiment
}
@ -43,11 +39,3 @@ func (s *MlflowExperiment) Exists(ctx context.Context, w *databricks.WorkspaceCl
func (s *MlflowExperiment) TerraformResourceName() string {
return "databricks_mlflow_experiment"
}
func (s *MlflowExperiment) Validate() error {
if s == nil || !s.DynamicValue.IsValid() {
return fmt.Errorf("experiment is not defined")
}
return nil
}

View File

@ -2,9 +2,7 @@ package resources
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
@ -16,8 +14,6 @@ type MlflowModel struct {
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
paths.Paths
*ml.Model
}
@ -43,11 +39,3 @@ func (s *MlflowModel) Exists(ctx context.Context, w *databricks.WorkspaceClient,
func (s *MlflowModel) TerraformResourceName() string {
return "databricks_mlflow_model"
}
func (s *MlflowModel) Validate() error {
if s == nil || !s.DynamicValue.IsValid() {
return fmt.Errorf("model is not defined")
}
return nil
}

View File

@ -2,9 +2,7 @@ package resources
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
@ -20,10 +18,6 @@ type ModelServingEndpoint struct {
// as a reference in other resources. This value is returned by terraform.
ID string `json:"id,omitempty" bundle:"readonly"`
// Path to config file where the resource is defined. All bundle resources
// include this for interpolation purposes.
paths.Paths
// This is a resource agnostic implementation of permissions for ACLs.
// Implementation could be different based on the resource type.
Permissions []Permission `json:"permissions,omitempty"`
@ -53,11 +47,3 @@ func (s *ModelServingEndpoint) Exists(ctx context.Context, w *databricks.Workspa
func (s *ModelServingEndpoint) TerraformResourceName() string {
return "databricks_model_serving"
}
func (s *ModelServingEndpoint) Validate() error {
if s == nil || !s.DynamicValue.IsValid() {
return fmt.Errorf("serving endpoint is not defined")
}
return nil
}

View File

@ -2,9 +2,7 @@ package resources
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
@ -16,8 +14,6 @@ type Pipeline struct {
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
paths.Paths
*pipelines.PipelineSpec
}
@ -43,11 +39,3 @@ func (p *Pipeline) Exists(ctx context.Context, w *databricks.WorkspaceClient, id
func (p *Pipeline) TerraformResourceName() string {
return "databricks_pipeline"
}
func (p *Pipeline) Validate() error {
if p == nil || !p.DynamicValue.IsValid() {
return fmt.Errorf("pipeline is not defined")
}
return nil
}

View File

@ -2,9 +2,7 @@ package resources
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
@ -21,10 +19,6 @@ type QualityMonitor struct {
// as a reference in other resources. This value is returned by terraform.
ID string `json:"id,omitempty" bundle:"readonly"`
// Path to config file where the resource is defined. All bundle resources
// include this for interpolation purposes.
paths.Paths
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
}
@ -50,11 +44,3 @@ func (s *QualityMonitor) Exists(ctx context.Context, w *databricks.WorkspaceClie
func (s *QualityMonitor) TerraformResourceName() string {
return "databricks_quality_monitor"
}
func (s *QualityMonitor) Validate() error {
if s == nil || !s.DynamicValue.IsValid() {
return fmt.Errorf("quality monitor is not defined")
}
return nil
}

View File

@ -2,9 +2,7 @@ package resources
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
@ -21,10 +19,6 @@ type RegisteredModel struct {
// as a reference in other resources. This value is returned by terraform.
ID string `json:"id,omitempty" bundle:"readonly"`
// Path to config file where the resource is defined. All bundle resources
// include this for interpolation purposes.
paths.Paths
// This represents the input args for terraform, and will get converted
// to a HCL representation for CRUD
*catalog.CreateRegisteredModelRequest
@ -54,11 +48,3 @@ func (s *RegisteredModel) Exists(ctx context.Context, w *databricks.WorkspaceCli
func (s *RegisteredModel) TerraformResourceName() string {
return "databricks_registered_model"
}
func (s *RegisteredModel) Validate() error {
if s == nil || !s.DynamicValue.IsValid() {
return fmt.Errorf("registered model is not defined")
}
return nil
}

View File

@ -136,17 +136,6 @@ func (r *Root) updateWithDynamicValue(nv dyn.Value) error {
// Assign the normalized configuration tree.
r.value = nv
// At the moment the check has to be done as part of updateWithDynamicValue
// because otherwise ConfigureConfigFilePath will fail with a panic.
// In the future, we should move this check to a separate mutator in initialise phase.
err = r.Resources.VerifyAllResourcesDefined()
if err != nil {
return err
}
// Assign config file paths after converting to typed configuration.
r.ConfigureConfigFilePath()
return nil
}
@ -238,15 +227,6 @@ func (r *Root) MarkMutatorExit(ctx context.Context) error {
return nil
}
// SetConfigFilePath configures the path that its configuration
// was loaded from in configuration leafs that require it.
func (r *Root) ConfigureConfigFilePath() {
r.Resources.ConfigureConfigFilePath()
if r.Artifacts != nil {
r.Artifacts.ConfigureConfigFilePath()
}
}
// Initializes variables using values passed from the command line flag
// Input has to be a string of the form `foo=bar`. In this case the variable with
// name `foo` is assigned the value `bar`

View File

@ -0,0 +1,47 @@
package validate
import (
"context"
"fmt"
"strings"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
)
func AllResourcesHaveValues() bundle.Mutator {
return &allResourcesHaveValues{}
}
type allResourcesHaveValues struct{}
func (m *allResourcesHaveValues) Name() string {
return "validate:AllResourcesHaveValues"
}
func (m *allResourcesHaveValues) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
rv := b.Config.Value().Get("resources")
// Skip if no resources block is defined, or if it is empty.
if rv.Kind() == dyn.KindInvalid || rv.Kind() == dyn.KindNil {
return nil
}
_, err := dyn.MapByPattern(
rv,
dyn.NewPattern(dyn.AnyKey(), dyn.AnyKey()),
func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
// Type of the resource, stripped of the trailing 's' to make it
// singular.
rType := strings.TrimSuffix(p[0].Key(), "s")
rName := p[1].Key()
return v, fmt.Errorf("%s %s is not defined", rType, rName)
}
return v, nil
},
)
return diag.FromErr(err)
}
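
A self-contained sketch of the walk above, with a hypothetical resources tree: `dyn.MapByPattern` visits every `resources.<type>.<key>` node, and the callback rejects nil- or invalid-valued entries:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// One well-defined job and one empty (nil-valued) pipeline.
	rv := dyn.V(map[string]dyn.Value{
		"jobs": dyn.V(map[string]dyn.Value{
			"my_job": dyn.V(map[string]dyn.Value{"name": dyn.V("My Job")}),
		}),
		"pipelines": dyn.V(map[string]dyn.Value{
			"undefined": dyn.NilValue,
		}),
	})

	_, err := dyn.MapByPattern(
		rv,
		dyn.NewPattern(dyn.AnyKey(), dyn.AnyKey()),
		func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
			if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
				// Strip the trailing 's' to get the singular resource type.
				rType := strings.TrimSuffix(p[0].Key(), "s")
				return v, fmt.Errorf("%s %s is not defined", rType, p[1].Key())
			}
			return v, nil
		},
	)
	fmt.Println(err) // pipeline undefined is not defined
}
```

In the bundle itself the mutator returns this error as a diagnostic via `diag.FromErr`, which is what the updated tests further down assert against.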

View File

@ -39,7 +39,8 @@ func (m *compute) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
for name, job := range b.Config.Resources.Jobs {
// Compute config file path the job is defined in, relative to the bundle
// root
relativePath, err := filepath.Rel(b.RootPath, job.ConfigFilePath)
l := b.Config.GetLocation("resources.jobs." + name)
relativePath, err := filepath.Rel(b.RootPath, l.File)
if err != nil {
return diag.Errorf("failed to compute relative path for job %s: %v", name, err)
}

View File

@ -29,6 +29,4 @@ func SetLocation(b *bundle.Bundle, prefix string, filePath string) {
return v, dyn.ErrSkip
})
})
b.Config.ConfigureConfigFilePath()
}

View File

@ -5,6 +5,7 @@ import (
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
pythonmutator "github.com/databricks/cli/bundle/config/mutator/python"
"github.com/databricks/cli/bundle/config/validate"
"github.com/databricks/cli/bundle/deploy/metadata"
"github.com/databricks/cli/bundle/deploy/terraform"
"github.com/databricks/cli/bundle/permissions"
@ -19,6 +20,7 @@ func Initialize() bundle.Mutator {
return newPhase(
"initialize",
[]bundle.Mutator{
validate.AllResourcesHaveValues(),
mutator.RewriteSyncPaths(),
mutator.MergeJobClusters(),
mutator.MergeJobParameters(),

View File

@ -7,7 +7,6 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/paths"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
@ -124,9 +123,6 @@ func TestNoPanicWithNoPythonWheelTasks(t *testing.T) {
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"test": {
Paths: paths.Paths{
ConfigFilePath: tmpDir,
},
JobSettings: &jobs.JobSettings{
Tasks: []jobs.Task{
{

View File

@ -15,7 +15,8 @@ func TestJobAndPipelineDevelopmentWithEnvironment(t *testing.T) {
assert.Len(t, b.Config.Resources.Pipelines, 1)
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
l := b.Config.GetLocation("resources.pipelines.nyc_taxi_pipeline")
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(l.File))
assert.Equal(t, b.Config.Bundle.Mode, config.Development)
assert.True(t, p.Development)
require.Len(t, p.Libraries, 1)
@ -29,7 +30,8 @@ func TestJobAndPipelineStagingWithEnvironment(t *testing.T) {
assert.Len(t, b.Config.Resources.Pipelines, 1)
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
l := b.Config.GetLocation("resources.pipelines.nyc_taxi_pipeline")
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(l.File))
assert.False(t, p.Development)
require.Len(t, p.Libraries, 1)
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
@ -42,14 +44,16 @@ func TestJobAndPipelineProductionWithEnvironment(t *testing.T) {
assert.Len(t, b.Config.Resources.Pipelines, 1)
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
pl := b.Config.GetLocation("resources.pipelines.nyc_taxi_pipeline")
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(pl.File))
assert.False(t, p.Development)
require.Len(t, p.Libraries, 1)
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
assert.Equal(t, "nyc_taxi_production", p.Target)
j := b.Config.Resources.Jobs["pipeline_schedule"]
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(j.ConfigFilePath))
jl := b.Config.GetLocation("resources.jobs.pipeline_schedule")
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(jl.File))
assert.Equal(t, "Daily refresh of production pipeline", j.Name)
require.Len(t, j.Tasks, 1)
assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId)

View File

@ -31,7 +31,8 @@ func TestIncludeWithGlob(t *testing.T) {
job := b.Config.Resources.Jobs["my_job"]
assert.Equal(t, "1", job.ID)
assert.Equal(t, "include_with_glob/job.yml", filepath.ToSlash(job.ConfigFilePath))
l := b.Config.GetLocation("resources.jobs.my_job")
assert.Equal(t, "include_with_glob/job.yml", filepath.ToSlash(l.File))
}
func TestIncludeDefault(t *testing.T) {
@ -51,9 +52,11 @@ func TestIncludeForMultipleMatches(t *testing.T) {
first := b.Config.Resources.Jobs["my_first_job"]
assert.Equal(t, "1", first.ID)
assert.Equal(t, "include_multiple/my_first_job/resource.yml", filepath.ToSlash(first.ConfigFilePath))
fl := b.Config.GetLocation("resources.jobs.my_first_job")
assert.Equal(t, "include_multiple/my_first_job/resource.yml", filepath.ToSlash(fl.File))
second := b.Config.Resources.Jobs["my_second_job"]
assert.Equal(t, "2", second.ID)
assert.Equal(t, "include_multiple/my_second_job/resource.yml", filepath.ToSlash(second.ConfigFilePath))
sl := b.Config.GetLocation("resources.jobs.my_second_job")
assert.Equal(t, "include_multiple/my_second_job/resource.yml", filepath.ToSlash(sl.File))
}

View File

@ -1,7 +1,6 @@
package config_tests
import (
"path/filepath"
"testing"
"github.com/databricks/cli/bundle/config"
@ -15,7 +14,6 @@ func TestJobAndPipelineDevelopment(t *testing.T) {
assert.Len(t, b.Config.Resources.Pipelines, 1)
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
assert.Equal(t, b.Config.Bundle.Mode, config.Development)
assert.True(t, p.Development)
require.Len(t, p.Libraries, 1)
@ -29,7 +27,6 @@ func TestJobAndPipelineStaging(t *testing.T) {
assert.Len(t, b.Config.Resources.Pipelines, 1)
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
assert.False(t, p.Development)
require.Len(t, p.Libraries, 1)
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
@ -42,14 +39,12 @@ func TestJobAndPipelineProduction(t *testing.T) {
assert.Len(t, b.Config.Resources.Pipelines, 1)
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
assert.False(t, p.Development)
require.Len(t, p.Libraries, 1)
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
assert.Equal(t, "nyc_taxi_production", p.Target)
j := b.Config.Resources.Jobs["pipeline_schedule"]
assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(j.ConfigFilePath))
assert.Equal(t, "Daily refresh of production pipeline", j.Name)
require.Len(t, j.Tasks, 1)
assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId)

View File

@ -1,7 +1,6 @@
package config_tests
import (
"path/filepath"
"testing"
"github.com/databricks/cli/bundle/config"
@ -10,7 +9,6 @@ import (
)
func assertExpected(t *testing.T, p *resources.ModelServingEndpoint) {
assert.Equal(t, "model_serving_endpoint/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
assert.Equal(t, "model-name", p.Config.ServedModels[0].ModelName)
assert.Equal(t, "1", p.Config.ServedModels[0].ModelVersion)
assert.Equal(t, "model-name-1", p.Config.TrafficConfig.Routes[0].ServedModelName)

View File

@ -1,7 +1,6 @@
package config_tests
import (
"path/filepath"
"testing"
"github.com/databricks/cli/bundle/config"
@ -10,7 +9,6 @@ import (
)
func assertExpectedModel(t *testing.T, p *resources.RegisteredModel) {
assert.Equal(t, "registered_model/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
assert.Equal(t, "main", p.CatalogName)
assert.Equal(t, "default", p.SchemaName)
assert.Equal(t, "comment", p.Comment)

View File

@ -1,12 +1,22 @@
package config_tests
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/validate"
"github.com/stretchr/testify/assert"
)
func TestUndefinedJobLoadsWithError(t *testing.T) {
_, diags := loadTargetWithDiags("./undefined_job", "default")
b := load(t, "./undefined_job")
diags := bundle.Apply(context.Background(), b, validate.AllResourcesHaveValues())
assert.ErrorContains(t, diags.Error(), "job undefined is not defined")
}
func TestUndefinedPipelineLoadsWithError(t *testing.T) {
b := load(t, "./undefined_pipeline")
diags := bundle.Apply(context.Background(), b, validate.AllResourcesHaveValues())
assert.ErrorContains(t, diags.Error(), "pipeline undefined is not defined")
}

View File

@ -0,0 +1,8 @@
bundle:
  name: undefined-pipeline

resources:
  pipelines:
    undefined:
    test:
      name: "Test Pipeline"

View File

@ -9,6 +9,12 @@ import (
"github.com/databricks/cli/libs/dyn/dynvar"
)
// Populate a destination typed value from a source dynamic value.
//
// At any point while walking the destination type tree using
// reflection, if this function sees an exported field with type dyn.Value it
// will populate that field with the appropriate source dynamic value.
// see PR: https://github.com/databricks/cli/pull/1010
func ToTyped(dst any, src dyn.Value) error {
dstv := reflect.ValueOf(dst)
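
A sketch of that behavior (the `Resource` struct is hypothetical, and assumes the special-casing applies to any exported `dyn.Value` field, per the comment above):

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
)

// Resource mirrors the shape of the deleted Paths embedding: ToTyped fills
// DynamicValue with the dyn.Value of the containing struct.
type Resource struct {
	Name         string    `json:"name"`
	DynamicValue dyn.Value `json:"-"`
}

func main() {
	src := dyn.V(map[string]dyn.Value{
		"name": dyn.V("my-resource"),
	})

	var r Resource
	if err := convert.ToTyped(&r, src); err != nil {
		panic(err)
	}

	fmt.Println(r.Name)                   // my-resource
	fmt.Println(r.DynamicValue.IsValid()) // true
}
```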